Dec 05 17:33:12 crc systemd[1]: Starting Kubernetes Kubelet...
Dec 05 17:33:12 crc restorecon[4699]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 05 17:33:12 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
system_u:object_r:container_file_t:s0:c0,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 17:33:13 crc 
restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Dec 05 17:33:13 crc 
restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc 
restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc 
restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 17:33:13 
crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 
17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]:
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 
17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc 
restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:33:13 crc restorecon[4699]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 17:33:13 crc restorecon[4699]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Dec 05 17:33:13 crc restorecon[4699]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Dec 05 17:33:14 crc kubenswrapper[4961]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 05 17:33:14 crc kubenswrapper[4961]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Dec 05 17:33:14 crc kubenswrapper[4961]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 05 17:33:14 crc kubenswrapper[4961]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Dec 05 17:33:14 crc kubenswrapper[4961]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Dec 05 17:33:14 crc kubenswrapper[4961]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.553043 4961 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558157 4961 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558191 4961 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558200 4961 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558207 4961 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558213 4961 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558220 4961 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558226 4961 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558231 4961 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558236 4961 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558241 4961 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558247 4961 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558252 4961 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558285 4961 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558291 4961 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558296 4961 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558301 4961 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558307 4961 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558312 4961 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558317 4961 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558323 4961 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558328 4961 feature_gate.go:330] unrecognized feature gate: 
MachineAPIMigration Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558333 4961 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558338 4961 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558343 4961 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558348 4961 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558353 4961 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558358 4961 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558364 4961 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558369 4961 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558374 4961 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558379 4961 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558384 4961 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558389 4961 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558394 4961 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558399 4961 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558417 4961 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558422 4961 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558437 4961 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558443 4961 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558448 4961 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558454 4961 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558459 4961 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558464 4961 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558469 4961 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558474 4961 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558482 4961 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
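
[Editorial aside] The deprecated-flag warnings above all point the same way: move the setting into the file named by --config (here /etc/kubernetes/kubelet.conf, per the FLAG: dump below). A minimal sketch of that migration, assuming the kubelet.config.k8s.io/v1beta1 field names and that the kubelet accepts JSON alongside YAML for this file; the values are copied from the FLAG: dump below:

// kubelet_config_sketch.go -- emit a minimal KubeletConfiguration carrying
// the settings that the deprecated CLI flags above currently set.
package main

import (
	"encoding/json"
	"fmt"
)

type taint struct {
	Key    string `json:"key"`
	Value  string `json:"value,omitempty"`
	Effect string `json:"effect"`
}

type kubeletConfiguration struct {
	Kind                     string            `json:"kind"`
	APIVersion               string            `json:"apiVersion"`
	ContainerRuntimeEndpoint string            `json:"containerRuntimeEndpoint"`
	VolumePluginDir          string            `json:"volumePluginDir"`
	SystemReserved           map[string]string `json:"systemReserved"`
	RegisterWithTaints       []taint           `json:"registerWithTaints"`
}

func main() {
	cfg := kubeletConfiguration{
		Kind:                     "KubeletConfiguration",
		APIVersion:               "kubelet.config.k8s.io/v1beta1",
		ContainerRuntimeEndpoint: "/var/run/crio/crio.sock",
		VolumePluginDir:          "/etc/kubernetes/kubelet-plugins/volume/exec",
		SystemReserved: map[string]string{
			"cpu": "200m", "memory": "350Mi", "ephemeral-storage": "350Mi",
		},
		RegisterWithTaints: []taint{
			{Key: "node-role.kubernetes.io/master", Effect: "NoSchedule"},
		},
	}
	out, _ := json.MarshalIndent(cfg, "", "  ")
	fmt.Println(string(out))
}
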
Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558488 4961 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558495 4961 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558503 4961 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558510 4961 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558516 4961 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558521 4961 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558527 4961 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558534 4961 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558539 4961 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558544 4961 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558549 4961 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558555 4961 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558561 4961 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558566 4961 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558572 4961 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558577 4961 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558582 4961 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558587 4961 feature_gate.go:330] unrecognized feature gate: Example Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558594 4961 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558599 4961 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558604 4961 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558610 4961 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558617 4961 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558624 4961 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
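
[Editorial aside] The unrecognized-gate warnings here (which continue below) arise because OpenShift hands its full feature-gate list to a kubelet that only knows the upstream Kubernetes gates: every OpenShift-only name is "unrecognized", while known GA or deprecated gates warn that forcing them will stop working in a future release. A minimal sketch of that logic, not the kubelet's actual feature_gate.go:

// feature_gate_sketch.go -- warn on unknown gates, note GA/deprecated
// overrides, and print the resulting effective map.
package main

import "log"

type stability int

const (
	alpha stability = iota
	ga
	deprecated
)

// Tiny stand-in for the kubelet's known-gate table.
var known = map[string]stability{
	"KMSv1":                                  deprecated,
	"ValidatingAdmissionPolicy":              ga,
	"CloudDualStackNodeIPs":                  ga,
	"DisableKubeletCloudCredentialProviders": ga,
	"NodeSwap":                               alpha,
}

func apply(enabled, overrides map[string]bool) {
	for name, val := range overrides {
		s, ok := known[name]
		if !ok {
			log.Printf("W unrecognized feature gate: %s", name)
			continue
		}
		switch s {
		case ga:
			log.Printf("W Setting GA feature gate %s=%v. It will be removed in a future release.", name, val)
		case deprecated:
			log.Printf("W Setting deprecated feature gate %s=%v. It will be removed in a future release.", name, val)
		}
		enabled[name] = val
	}
}

func main() {
	enabled := map[string]bool{"NodeSwap": false} // defaults
	apply(enabled, map[string]bool{
		"KMSv1":                     true,
		"ValidatingAdmissionPolicy": true,
		"RouteAdvertisements":       true, // OpenShift-only: unrecognized here
	})
	log.Printf("I feature gates: %v", enabled)
}
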
Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.558630 4961 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558759 4961 flags.go:64] FLAG: --address="0.0.0.0" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558795 4961 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558808 4961 flags.go:64] FLAG: --anonymous-auth="true" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558817 4961 flags.go:64] FLAG: --application-metrics-count-limit="100" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558825 4961 flags.go:64] FLAG: --authentication-token-webhook="false" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558832 4961 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558841 4961 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558850 4961 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558857 4961 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558863 4961 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558870 4961 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558876 4961 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558882 4961 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558889 4961 flags.go:64] FLAG: --cgroup-root="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558895 4961 flags.go:64] FLAG: --cgroups-per-qos="true" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558901 4961 flags.go:64] FLAG: --client-ca-file="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558907 4961 flags.go:64] FLAG: --cloud-config="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558913 4961 flags.go:64] FLAG: --cloud-provider="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558921 4961 flags.go:64] FLAG: --cluster-dns="[]" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558928 4961 flags.go:64] FLAG: --cluster-domain="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558934 4961 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558940 4961 flags.go:64] FLAG: --config-dir="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558946 4961 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558953 4961 flags.go:64] FLAG: --container-log-max-files="5" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558963 4961 flags.go:64] FLAG: --container-log-max-size="10Mi" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558969 4961 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558975 4961 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558983 4961 flags.go:64] FLAG: --containerd-namespace="k8s.io" Dec 05 17:33:14 crc 
kubenswrapper[4961]: I1205 17:33:14.558989 4961 flags.go:64] FLAG: --contention-profiling="false" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.558996 4961 flags.go:64] FLAG: --cpu-cfs-quota="true" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559002 4961 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559009 4961 flags.go:64] FLAG: --cpu-manager-policy="none" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559015 4961 flags.go:64] FLAG: --cpu-manager-policy-options="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559022 4961 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559029 4961 flags.go:64] FLAG: --enable-controller-attach-detach="true" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559035 4961 flags.go:64] FLAG: --enable-debugging-handlers="true" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559041 4961 flags.go:64] FLAG: --enable-load-reader="false" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559047 4961 flags.go:64] FLAG: --enable-server="true" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559054 4961 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559063 4961 flags.go:64] FLAG: --event-burst="100" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559070 4961 flags.go:64] FLAG: --event-qps="50" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559076 4961 flags.go:64] FLAG: --event-storage-age-limit="default=0" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559082 4961 flags.go:64] FLAG: --event-storage-event-limit="default=0" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559088 4961 flags.go:64] FLAG: --eviction-hard="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559097 4961 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559103 4961 flags.go:64] FLAG: --eviction-minimum-reclaim="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559110 4961 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559116 4961 flags.go:64] FLAG: --eviction-soft="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559122 4961 flags.go:64] FLAG: --eviction-soft-grace-period="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559129 4961 flags.go:64] FLAG: --exit-on-lock-contention="false" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559137 4961 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559145 4961 flags.go:64] FLAG: --experimental-mounter-path="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559153 4961 flags.go:64] FLAG: --fail-cgroupv1="false" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559161 4961 flags.go:64] FLAG: --fail-swap-on="true" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559169 4961 flags.go:64] FLAG: --feature-gates="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559178 4961 flags.go:64] FLAG: --file-check-frequency="20s" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559185 4961 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559193 4961 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Dec 05 17:33:14 crc 
kubenswrapper[4961]: I1205 17:33:14.559269 4961 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559278 4961 flags.go:64] FLAG: --healthz-port="10248" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559285 4961 flags.go:64] FLAG: --help="false" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559291 4961 flags.go:64] FLAG: --hostname-override="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559297 4961 flags.go:64] FLAG: --housekeeping-interval="10s" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559304 4961 flags.go:64] FLAG: --http-check-frequency="20s" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559310 4961 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559318 4961 flags.go:64] FLAG: --image-credential-provider-config="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559324 4961 flags.go:64] FLAG: --image-gc-high-threshold="85" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559331 4961 flags.go:64] FLAG: --image-gc-low-threshold="80" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559338 4961 flags.go:64] FLAG: --image-service-endpoint="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559344 4961 flags.go:64] FLAG: --kernel-memcg-notification="false" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559350 4961 flags.go:64] FLAG: --kube-api-burst="100" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559357 4961 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559363 4961 flags.go:64] FLAG: --kube-api-qps="50" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559369 4961 flags.go:64] FLAG: --kube-reserved="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559375 4961 flags.go:64] FLAG: --kube-reserved-cgroup="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559381 4961 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559388 4961 flags.go:64] FLAG: --kubelet-cgroups="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559394 4961 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559401 4961 flags.go:64] FLAG: --lock-file="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559407 4961 flags.go:64] FLAG: --log-cadvisor-usage="false" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559413 4961 flags.go:64] FLAG: --log-flush-frequency="5s" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559419 4961 flags.go:64] FLAG: --log-json-info-buffer-size="0" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559431 4961 flags.go:64] FLAG: --log-json-split-stream="false" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559437 4961 flags.go:64] FLAG: --log-text-info-buffer-size="0" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559444 4961 flags.go:64] FLAG: --log-text-split-stream="false" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559450 4961 flags.go:64] FLAG: --logging-format="text" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559456 4961 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559463 4961 flags.go:64] FLAG: --make-iptables-util-chains="true" Dec 05 17:33:14 crc 
kubenswrapper[4961]: I1205 17:33:14.559470 4961 flags.go:64] FLAG: --manifest-url="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559476 4961 flags.go:64] FLAG: --manifest-url-header="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559484 4961 flags.go:64] FLAG: --max-housekeeping-interval="15s" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559491 4961 flags.go:64] FLAG: --max-open-files="1000000" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559499 4961 flags.go:64] FLAG: --max-pods="110" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559505 4961 flags.go:64] FLAG: --maximum-dead-containers="-1" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559511 4961 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559518 4961 flags.go:64] FLAG: --memory-manager-policy="None" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559524 4961 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559531 4961 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559539 4961 flags.go:64] FLAG: --node-ip="192.168.126.11" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559545 4961 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559562 4961 flags.go:64] FLAG: --node-status-max-images="50" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559569 4961 flags.go:64] FLAG: --node-status-update-frequency="10s" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559576 4961 flags.go:64] FLAG: --oom-score-adj="-999" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559583 4961 flags.go:64] FLAG: --pod-cidr="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559589 4961 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559599 4961 flags.go:64] FLAG: --pod-manifest-path="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559606 4961 flags.go:64] FLAG: --pod-max-pids="-1" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559612 4961 flags.go:64] FLAG: --pods-per-core="0" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559618 4961 flags.go:64] FLAG: --port="10250" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559625 4961 flags.go:64] FLAG: --protect-kernel-defaults="false" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559631 4961 flags.go:64] FLAG: --provider-id="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559637 4961 flags.go:64] FLAG: --qos-reserved="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559644 4961 flags.go:64] FLAG: --read-only-port="10255" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559651 4961 flags.go:64] FLAG: --register-node="true" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559658 4961 flags.go:64] FLAG: --register-schedulable="true" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559664 4961 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559676 4961 flags.go:64] FLAG: --registry-burst="10" Dec 05 17:33:14 crc 
kubenswrapper[4961]: I1205 17:33:14.559682 4961 flags.go:64] FLAG: --registry-qps="5" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559689 4961 flags.go:64] FLAG: --reserved-cpus="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559696 4961 flags.go:64] FLAG: --reserved-memory="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559707 4961 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559724 4961 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559734 4961 flags.go:64] FLAG: --rotate-certificates="false" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559742 4961 flags.go:64] FLAG: --rotate-server-certificates="false" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559749 4961 flags.go:64] FLAG: --runonce="false" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559757 4961 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559765 4961 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559798 4961 flags.go:64] FLAG: --seccomp-default="false" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559807 4961 flags.go:64] FLAG: --serialize-image-pulls="true" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559816 4961 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559823 4961 flags.go:64] FLAG: --storage-driver-db="cadvisor" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559833 4961 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559841 4961 flags.go:64] FLAG: --storage-driver-password="root" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559849 4961 flags.go:64] FLAG: --storage-driver-secure="false" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559857 4961 flags.go:64] FLAG: --storage-driver-table="stats" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559865 4961 flags.go:64] FLAG: --storage-driver-user="root" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559873 4961 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559882 4961 flags.go:64] FLAG: --sync-frequency="1m0s" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559890 4961 flags.go:64] FLAG: --system-cgroups="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559898 4961 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559912 4961 flags.go:64] FLAG: --system-reserved-cgroup="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559919 4961 flags.go:64] FLAG: --tls-cert-file="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559924 4961 flags.go:64] FLAG: --tls-cipher-suites="[]" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559933 4961 flags.go:64] FLAG: --tls-min-version="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559940 4961 flags.go:64] FLAG: --tls-private-key-file="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559946 4961 flags.go:64] FLAG: --topology-manager-policy="none" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559952 4961 flags.go:64] FLAG: --topology-manager-policy-options="" Dec 05 17:33:14 crc kubenswrapper[4961]: 
I1205 17:33:14.559960 4961 flags.go:64] FLAG: --topology-manager-scope="container" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559966 4961 flags.go:64] FLAG: --v="2" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559975 4961 flags.go:64] FLAG: --version="false" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559983 4961 flags.go:64] FLAG: --vmodule="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559993 4961 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.559999 4961 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.560102 4961 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.560108 4961 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.560114 4961 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.560119 4961 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.560124 4961 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.560129 4961 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.560476 4961 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.560517 4961 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.560521 4961 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.560524 4961 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.560528 4961 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.560532 4961 feature_gate.go:330] unrecognized feature gate: Example Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.560536 4961 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.560540 4961 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.560543 4961 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.560548 4961 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.560551 4961 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.560555 4961 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.560559 4961 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562282 4961 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562301 4961 feature_gate.go:353] Setting GA feature gate 
DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562312 4961 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562321 4961 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562328 4961 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562335 4961 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562342 4961 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562348 4961 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562356 4961 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562366 4961 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562374 4961 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562381 4961 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562393 4961 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562399 4961 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562406 4961 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562412 4961 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562420 4961 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562426 4961 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562433 4961 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562442 4961 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
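
[Editorial aside] Returning to the long FLAG: dump above (flags.go:64): the kubelet walks its flag set and logs every flag with its effective value, defaults included. The kubelet does this with pflag's VisitAll; a self-contained sketch with Go's standard flag package produces the same style of output:

// flag_dump_sketch.go -- log every registered flag as FLAG: --name="value",
// mirroring the flags.go:64 lines above.
package main

import (
	"flag"
	"log"
)

func main() {
	v := flag.Int("v", 2, "log verbosity")
	nodeIP := flag.String("node-ip", "192.168.126.11", "node IP address")
	maxPods := flag.Int("max-pods", 110, "maximum pods per node")
	flag.Parse()

	// Log every flag with its effective value, defaults included.
	flag.VisitAll(func(f *flag.Flag) {
		log.Printf("FLAG: --%s=%q", f.Name, f.Value.String())
	})
	_, _, _ = v, nodeIP, maxPods
}
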
Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562449 4961 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562455 4961 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562462 4961 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562468 4961 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562478 4961 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562484 4961 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562490 4961 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562497 4961 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562504 4961 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562510 4961 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562518 4961 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562524 4961 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562534 4961 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562540 4961 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562547 4961 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562553 4961 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562566 4961 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562572 4961 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562578 4961 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562584 4961 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562590 4961 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562596 4961 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562602 4961 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562608 4961 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562614 4961 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562620 4961 feature_gate.go:330] unrecognized 
feature gate: AWSClusterHostedDNS Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562626 4961 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562632 4961 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562638 4961 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562647 4961 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562653 4961 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.562663 4961 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.562678 4961 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.574855 4961 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.574896 4961 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.574985 4961 feature_gate.go:330] unrecognized feature gate: Example Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.574994 4961 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575001 4961 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575008 4961 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575013 4961 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575019 4961 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575025 4961 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575032 4961 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575041 4961 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575049 4961 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575056 4961 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575062 4961 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575068 4961 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575074 4961 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575081 4961 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575086 4961 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575092 4961 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575099 4961 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575105 4961 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575112 4961 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575119 4961 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575125 4961 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575131 4961 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575136 4961 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575141 4961 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575149 4961 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575155 4961 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575161 4961 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575166 4961 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575171 4961 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575177 4961 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575182 4961 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575189 4961 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575197 4961 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575203 4961 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575209 4961 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575217 4961 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575224 4961 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575230 4961 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575237 4961 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575244 4961 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575251 4961 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575257 4961 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575263 4961 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575269 4961 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575274 4961 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575281 4961 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575286 4961 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575292 4961 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575298 4961 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575303 4961 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575309 4961 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575314 4961 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575320 4961 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575325 4961 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575331 4961 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575337 4961 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575342 4961 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575347 4961 feature_gate.go:330] unrecognized 
feature gate: ExternalOIDC Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575353 4961 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575359 4961 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575364 4961 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575370 4961 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575376 4961 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575381 4961 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575386 4961 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575391 4961 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575398 4961 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575405 4961 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575412 4961 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575417 4961 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.575426 4961 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575609 4961 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575621 4961 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. 
Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575629 4961 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575636 4961 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575642 4961 feature_gate.go:330] unrecognized feature gate: InsightsConfig Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575649 4961 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575655 4961 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575661 4961 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575668 4961 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575674 4961 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575682 4961 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575689 4961 feature_gate.go:330] unrecognized feature gate: GatewayAPI Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575695 4961 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575700 4961 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575707 4961 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. 
Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575714 4961 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575719 4961 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575725 4961 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575731 4961 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575739 4961 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575745 4961 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575751 4961 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575757 4961 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575762 4961 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575768 4961 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575794 4961 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575803 4961 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575810 4961 feature_gate.go:330] unrecognized feature gate: NewOLM Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575819 4961 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575827 4961 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575833 4961 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575840 4961 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575845 4961 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575852 4961 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575857 4961 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575862 4961 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575868 4961 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575873 4961 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575879 4961 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575884 4961 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575889 4961 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575895 4961 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575900 4961 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575906 4961 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575911 4961 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575917 4961 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575922 4961 feature_gate.go:330] unrecognized feature gate: OVNObservability Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575929 4961 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575935 4961 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575940 4961 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575945 4961 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575951 4961 feature_gate.go:330] unrecognized feature gate: PlatformOperators Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575960 4961 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575965 4961 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 
17:33:14.575971 4961 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575976 4961 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575982 4961 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575988 4961 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575993 4961 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.575998 4961 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.576003 4961 feature_gate.go:330] unrecognized feature gate: SignatureStores Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.576009 4961 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.576015 4961 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.576021 4961 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.576027 4961 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.576032 4961 feature_gate.go:330] unrecognized feature gate: PinnedImages Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.576037 4961 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.576043 4961 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.576049 4961 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.576054 4961 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.576059 4961 feature_gate.go:330] unrecognized feature gate: Example Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.576067 4961 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.576570 4961 server.go:940] "Client rotation is on, will bootstrap in background" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.579922 4961 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.580026 4961 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
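
[Editorial aside] The next entries show the client certificate manager computing a rotation deadline months before the certificate's 2026-02-24 expiry, then sleeping until it. A minimal sketch of the jittered-deadline rule, assuming the upstream client-go behavior of picking a uniformly random point in the 70-90% band of the certificate's validity window (the NotBefore value below is an assumed issue time, one year before the expiry shown in the log):

// rotation_deadline_sketch.go -- compute a jittered client-cert rotation
// deadline inside the 70-90% band of NotBefore..NotAfter.
package main

import (
	"fmt"
	"math/rand"
	"time"
)

func nextRotationDeadline(notBefore, notAfter time.Time) time.Time {
	validity := notAfter.Sub(notBefore)
	jittered := time.Duration(float64(validity) * (0.7 + 0.3*rand.Float64()))
	return notBefore.Add(jittered)
}

func main() {
	notBefore := time.Date(2025, 2, 24, 5, 52, 8, 0, time.UTC) // assumed issue time
	notAfter := time.Date(2026, 2, 24, 5, 52, 8, 0, time.UTC)  // expiry from the log
	deadline := nextRotationDeadline(notBefore, notAfter)
	fmt.Printf("rotation deadline: %s (waiting %s)\n",
		deadline, time.Until(deadline).Round(time.Second))
}
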
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.580753 4961 server.go:997] "Starting client certificate rotation"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.580822 4961 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.581012 4961 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2025-12-15 23:03:27.814582976 +0000 UTC
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.581124 4961 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 245h30m13.233462599s for next certificate rotation
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.591883 4961 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.594745 4961 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.606859 4961 log.go:25] "Validated CRI v1 runtime API"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.621916 4961 log.go:25] "Validated CRI v1 image API"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.623710 4961 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.629019 4961 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-12-05-17-29-06-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.629059 4961 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:41 fsType:tmpfs blockSize:0}]
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.649217 4961 manager.go:217] Machine: {Timestamp:2025-12-05 17:33:14.646425961 +0000 UTC m=+0.707576534 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2799998 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:76caf0b9-12fa-49d9-8944-44d70ddec643 BootID:f7e99468-3bda-480f-aff7-5c637658e842 Filesystems:[{Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:41 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:cf:e7:09 Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:cf:e7:09 Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:7f:55:be Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:1e:bc:23 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:ca:6f:74 Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:b7:6d:c7 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:3e:84:cd:bb:70:42 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:0a:6e:53:3b:e5:48 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.649444 4961 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.649592 4961 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.653819 4961 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.654016 4961 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.654058 4961 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.654258 4961 topology_manager.go:138] "Creating topology manager with none policy"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.654269 4961 container_manager_linux.go:303] "Creating device plugin manager"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.654451 4961 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.654488 4961 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.654794 4961 state_mem.go:36] "Initialized new in-memory state store"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.654878 4961 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.658638 4961 kubelet.go:418] "Attempting to sync node with API server"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.658671 4961 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.658696 4961 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.658710 4961 kubelet.go:324] "Adding apiserver pod source"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.658755 4961 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.668149 4961 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.668558 4961 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.17:6443: connect: connection refused
Dec 05 17:33:14 crc kubenswrapper[4961]: E1205 17:33:14.668764 4961 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.17:6443: connect: connection refused" logger="UnhandledError"
Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.668912 4961 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.17:6443: connect: connection refused
Dec 05 17:33:14 crc kubenswrapper[4961]: E1205 17:33:14.669059 4961 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.17:6443: connect: connection refused" logger="UnhandledError"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.707360 4961 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
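The certificate_manager.go:356 lines above schedule the next client-certificate rotation: the logged wait (245h30m13s) is simply the rotation deadline minus the current time, and the deadline sits inside the certificate's validity window (client-go picks a jittered point at roughly 70-90% of the lifetime; that fraction is an assumption for this sketch, not something the log states). A small Go sketch reproducing the logged wait from the two timestamps:

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// nextRotationDeadline sketches how a certificate manager might pick a
// rotation point: a jittered 70-90% of the validity window. Hypothetical
// helper for illustration only.
func nextRotationDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	jitter := 0.7 + 0.2*rand.Float64()
	return notBefore.Add(time.Duration(float64(total) * jitter))
}

func main() {
	// Timestamps taken from the log lines above.
	now := time.Date(2025, 12, 5, 17, 33, 14, 581124000, time.UTC)
	deadline := time.Date(2025, 12, 15, 23, 3, 27, 814582976, time.UTC)
	// Prints ~245h30m13.23s, matching the logged wait.
	fmt.Println("waiting", deadline.Sub(now), "for next certificate rotation")
}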
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.708087 4961 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.708697 4961 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.708738 4961 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.708748 4961 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.708756 4961 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.708790 4961 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.708801 4961 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.708810 4961 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.708827 4961 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.708840 4961 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.708850 4961 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.708904 4961 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.708916 4961 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.709129 4961 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.710194 4961 server.go:1280] "Started kubelet"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.710853 4961 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.710910 4961 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.711536 4961 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Dec 05 17:33:14 crc systemd[1]: Started Kubernetes Kubelet.
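Every client-go call above and below fails the same way: the TCP dial to api-int.crc.testing (38.102.83.17) on port 6443 is refused because the API server is not up yet, while the kubelet keeps starting and retrying. A tiny Go probe performing the same dial the failing requests make at the transport level (probe is a hypothetical helper, not kubelet code):

package main

import (
	"fmt"
	"net"
	"time"
)

// probe opens a plain TCP connection to the given address; against a down
// apiserver it returns the same "connect: connection refused" seen in the
// reflector, lease, and event errors in this log.
func probe(addr string) error {
	conn, err := net.DialTimeout("tcp", addr, 2*time.Second)
	if err != nil {
		return err
	}
	return conn.Close()
}

func main() {
	// Endpoint taken from the log; api-int.crc.testing resolves to 38.102.83.17 here.
	if err := probe("api-int.crc.testing:6443"); err != nil {
		fmt.Println("apiserver not reachable yet:", err)
	}
}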
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.805128 4961 server.go:460] "Adding debug handlers to kubelet server"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.805344 4961 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.805400 4961 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.805474 4961 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.17:6443: connect: connection refused
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.805863 4961 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 10:46:22.020749949 +0000 UTC
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.805929 4961 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 713h13m7.214825619s for next certificate rotation
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.805968 4961 volume_manager.go:287] "The desired_state_of_world populator starts"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.805991 4961 volume_manager.go:289] "Starting Kubelet Volume Manager"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.806225 4961 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Dec 05 17:33:14 crc kubenswrapper[4961]: E1205 17:33:14.806688 4961 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Dec 05 17:33:14 crc kubenswrapper[4961]: E1205 17:33:14.810517 4961 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.17:6443: connect: connection refused" interval="200ms"
Dec 05 17:33:14 crc kubenswrapper[4961]: W1205 17:33:14.810512 4961 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.17:6443: connect: connection refused
Dec 05 17:33:14 crc kubenswrapper[4961]: E1205 17:33:14.810604 4961 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.17:6443: connect: connection refused" logger="UnhandledError"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.810656 4961 factory.go:153] Registering CRI-O factory
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.810681 4961 factory.go:221] Registration of the crio container factory successfully
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.810741 4961 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.810753 4961 factory.go:55] Registering systemd factory
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.810760 4961 factory.go:221] Registration of the systemd container factory successfully
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.810937 4961 factory.go:103] Registering Raw factory
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.810971 4961 manager.go:1196] Started watching for new ooms in manager
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.811575 4961 manager.go:319] Starting recovery of all containers
Dec 05 17:33:14 crc kubenswrapper[4961]: E1205 17:33:14.810704 4961 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.17:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187e622044dd652b default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 17:33:14.709648683 +0000 UTC m=+0.770799156,LastTimestamp:2025-12-05 17:33:14.709648683 +0000 UTC m=+0.770799156,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.822488 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.822837 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.822857 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.822877 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.822895 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.822914 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.822936 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.822955 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.822974 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.822992 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.823010 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.823027 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.823047 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.823071 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.823089 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.823110 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.823132 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.823153 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.823177 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.823196 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.823217 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.823235 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.823258 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826147 4961 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826193 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826217 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826236 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826256 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826281 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826300 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826317 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826369 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826394 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826411 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826428 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826447 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826466 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826485 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826503 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826519 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826536 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826553 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826568 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826587 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826606 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826625 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826643 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826662 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826683 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826703 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826724 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826744 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826762 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826813 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826836 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826856 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826873 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826892 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826912 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826930 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826946 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826964 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.826985 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827005 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827023 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827039 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827055 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827072 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827092 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827111 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827127 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827142 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827159 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827177 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827193 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827211 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827231 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827249 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827266 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827287 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827305 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827324 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827342 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827361 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827378 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827443 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827468 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827485 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827502 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827518 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827536 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827553 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827570 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827585 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827600 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827615 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827636 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827658 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827678 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827699 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827718 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827739 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827761 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827805 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827827 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827895 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827922 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827948 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827971 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.827989 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828009 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828029 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828052 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828074 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828095 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828112 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828130 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828149 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828202 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828224 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828243 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828261 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828278 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828296 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828315 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828333 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828352 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828370 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828396 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828415 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828433 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828452 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828472 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828513 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828531 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828551 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828570 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828590 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828609 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext=""
Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828633 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753"
volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828651 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828672 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828692 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.828712 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.829286 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.829338 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.829374 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.829396 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.829419 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.829576 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.829611 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" 
volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.829642 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.829664 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.829685 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.829710 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.829731 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.830019 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.830056 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.830076 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.830102 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.830127 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.830155 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" 
volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.830174 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.830196 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.830237 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.830260 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.830290 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.830311 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.830333 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.830357 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.830380 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.830402 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.830428 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" 
volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.830449 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.830477 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.830407 4961 manager.go:324] Recovery completed Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.830499 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.830962 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.830994 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831028 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831052 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831068 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831087 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831100 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831126 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831145 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831161 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831182 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831194 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831220 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831242 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831264 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831294 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831312 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831337 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831349 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831363 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831380 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831396 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831416 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831430 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831444 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831460 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831472 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831489 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831501 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831513 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual 
state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831531 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831544 4961 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831559 4961 reconstruct.go:97] "Volume reconstruction finished" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.831569 4961 reconciler.go:26] "Reconciler: start to sync state" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.843305 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.844935 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.845001 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.845014 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.846111 4961 cpu_manager.go:225] "Starting CPU manager" policy="none" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.846137 4961 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.846159 4961 state_mem.go:36] "Initialized new in-memory state store" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.860275 4961 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.861649 4961 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.861752 4961 status_manager.go:217] "Starting to sync pod status with apiserver" Dec 05 17:33:14 crc kubenswrapper[4961]: I1205 17:33:14.861868 4961 kubelet.go:2335] "Starting kubelet main sync loop" Dec 05 17:33:14 crc kubenswrapper[4961]: E1205 17:33:14.862378 4961 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Dec 05 17:33:14 crc kubenswrapper[4961]: E1205 17:33:14.906922 4961 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 17:33:14 crc kubenswrapper[4961]: E1205 17:33:14.963168 4961 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Dec 05 17:33:15 crc kubenswrapper[4961]: E1205 17:33:15.007434 4961 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 17:33:15 crc kubenswrapper[4961]: E1205 17:33:15.011417 4961 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.17:6443: connect: connection refused" interval="400ms" Dec 05 17:33:15 crc kubenswrapper[4961]: E1205 17:33:15.107757 4961 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 17:33:15 crc kubenswrapper[4961]: W1205 17:33:15.159761 4961 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.17:6443: connect: connection refused Dec 05 17:33:15 crc kubenswrapper[4961]: E1205 17:33:15.159971 4961 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.17:6443: connect: connection refused" logger="UnhandledError" Dec 05 17:33:15 crc kubenswrapper[4961]: E1205 17:33:15.164348 4961 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Dec 05 17:33:15 crc kubenswrapper[4961]: E1205 17:33:15.207909 4961 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.297037 4961 policy_none.go:49] "None policy: Start" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.298073 4961 memory_manager.go:170] "Starting memorymanager" policy="None" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.298133 4961 state_mem.go:35] "Initializing new in-memory state store" Dec 05 17:33:15 crc kubenswrapper[4961]: E1205 17:33:15.308503 4961 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.351193 4961 manager.go:334] "Starting Device Plugin manager" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.351816 4961 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.351835 4961 server.go:79] "Starting device plugin 
registration server" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.352281 4961 eviction_manager.go:189] "Eviction manager: starting control loop" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.352345 4961 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.352807 4961 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.352961 4961 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.352974 4961 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Dec 05 17:33:15 crc kubenswrapper[4961]: E1205 17:33:15.358882 4961 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 05 17:33:15 crc kubenswrapper[4961]: E1205 17:33:15.413275 4961 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.17:6443: connect: connection refused" interval="800ms" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.453795 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.455405 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.455444 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.455456 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.455487 4961 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 17:33:15 crc kubenswrapper[4961]: E1205 17:33:15.456032 4961 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.17:6443: connect: connection refused" node="crc" Dec 05 17:33:15 crc kubenswrapper[4961]: W1205 17:33:15.547801 4961 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.17:6443: connect: connection refused Dec 05 17:33:15 crc kubenswrapper[4961]: E1205 17:33:15.547920 4961 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.17:6443: connect: connection refused" logger="UnhandledError" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.565380 4961 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc"] Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 
17:33:15.565491 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.566842 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.566895 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.566917 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.567103 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.567338 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.567398 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.568645 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.568682 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.568692 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.568763 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.568888 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.568897 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.569025 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.569082 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.569141 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.569684 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.569733 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.569752 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.569920 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.570122 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.570158 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.570630 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.570648 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.570656 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.570909 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.570940 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.570957 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.570966 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.570955 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.570998 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.571055 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.571151 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.571189 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.571852 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.571910 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.571934 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.572210 4961 util.go:30] "No sandbox for pod can be found. 
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.572262 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.572589 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.572619 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.572628 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.573217 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.573267 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.573291 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.642040 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.642102 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.642154 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.642197 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.642326 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.642372 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.642416 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.642478 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.642530 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.642551 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.642583 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.642602 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.642617 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.642685 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.642731 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.656559 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.658313 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.658413 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.658437 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.658584 4961 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: E1205 17:33:15.659653 4961 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.17:6443: connect: connection refused" node="crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.743744 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.743920 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.744020 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.744091 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.744116 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.744033 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.744231 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.744267 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.744300 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.744323 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.744355 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.744358 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.744387 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.744395 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.744404 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.744429 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.744457 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.744483 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.744512 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.744537 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.744537 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.744586 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.744593 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.744585 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.744623 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.744628 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.744581 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.744667 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.745266 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.745323 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.807564 4961 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.17:6443: connect: connection refused
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.889901 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.913533 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.919076 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.938345 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Dec 05 17:33:15 crc kubenswrapper[4961]: I1205 17:33:15.952241 4961 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-etcd/etcd-crc" Dec 05 17:33:16 crc kubenswrapper[4961]: W1205 17:33:16.013196 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-ed0765504ac34d52d0e00137571200117486dd258589cd7ce04088fe9037f405 WatchSource:0}: Error finding container ed0765504ac34d52d0e00137571200117486dd258589cd7ce04088fe9037f405: Status 404 returned error can't find the container with id ed0765504ac34d52d0e00137571200117486dd258589cd7ce04088fe9037f405 Dec 05 17:33:16 crc kubenswrapper[4961]: W1205 17:33:16.013644 4961 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.17:6443: connect: connection refused Dec 05 17:33:16 crc kubenswrapper[4961]: E1205 17:33:16.013739 4961 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.17:6443: connect: connection refused" logger="UnhandledError" Dec 05 17:33:16 crc kubenswrapper[4961]: W1205 17:33:16.017535 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-5365f452dcf47e5a7cfd1f880e6f7a5c8a27f796bc37ae7abba4c4ac2428315e WatchSource:0}: Error finding container 5365f452dcf47e5a7cfd1f880e6f7a5c8a27f796bc37ae7abba4c4ac2428315e: Status 404 returned error can't find the container with id 5365f452dcf47e5a7cfd1f880e6f7a5c8a27f796bc37ae7abba4c4ac2428315e Dec 05 17:33:16 crc kubenswrapper[4961]: W1205 17:33:16.031842 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-dcf28fe65b496c855c7d94ad72f3b18ddab5f4791dcf8b8e66f7b88b5b6dd55e WatchSource:0}: Error finding container dcf28fe65b496c855c7d94ad72f3b18ddab5f4791dcf8b8e66f7b88b5b6dd55e: Status 404 returned error can't find the container with id dcf28fe65b496c855c7d94ad72f3b18ddab5f4791dcf8b8e66f7b88b5b6dd55e Dec 05 17:33:16 crc kubenswrapper[4961]: W1205 17:33:16.032368 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-3fd1088be73cc27b0cda9bae1f63b4a2859a6da22e1689afb1a8161be63f958c WatchSource:0}: Error finding container 3fd1088be73cc27b0cda9bae1f63b4a2859a6da22e1689afb1a8161be63f958c: Status 404 returned error can't find the container with id 3fd1088be73cc27b0cda9bae1f63b4a2859a6da22e1689afb1a8161be63f958c Dec 05 17:33:16 crc kubenswrapper[4961]: I1205 17:33:16.060729 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:16 crc kubenswrapper[4961]: I1205 17:33:16.062823 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:16 crc kubenswrapper[4961]: I1205 17:33:16.062860 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:16 crc kubenswrapper[4961]: I1205 17:33:16.062871 4961 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 17:33:16 crc kubenswrapper[4961]: I1205 17:33:16.062905 4961 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 17:33:16 crc kubenswrapper[4961]: E1205 17:33:16.063412 4961 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.17:6443: connect: connection refused" node="crc" Dec 05 17:33:16 crc kubenswrapper[4961]: E1205 17:33:16.214204 4961 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.17:6443: connect: connection refused" interval="1.6s" Dec 05 17:33:16 crc kubenswrapper[4961]: W1205 17:33:16.264859 4961 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.17:6443: connect: connection refused Dec 05 17:33:16 crc kubenswrapper[4961]: E1205 17:33:16.264981 4961 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.17:6443: connect: connection refused" logger="UnhandledError" Dec 05 17:33:16 crc kubenswrapper[4961]: W1205 17:33:16.686762 4961 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.17:6443: connect: connection refused Dec 05 17:33:16 crc kubenswrapper[4961]: E1205 17:33:16.686866 4961 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.17:6443: connect: connection refused" logger="UnhandledError" Dec 05 17:33:16 crc kubenswrapper[4961]: I1205 17:33:16.807080 4961 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.17:6443: connect: connection refused Dec 05 17:33:16 crc kubenswrapper[4961]: I1205 17:33:16.863506 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:16 crc kubenswrapper[4961]: I1205 17:33:16.865068 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:16 crc kubenswrapper[4961]: I1205 17:33:16.865118 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:16 crc kubenswrapper[4961]: I1205 17:33:16.865133 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:16 crc kubenswrapper[4961]: I1205 17:33:16.865164 4961 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 17:33:16 crc kubenswrapper[4961]: E1205 17:33:16.865591 4961 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 
Dec 05 17:33:16 crc kubenswrapper[4961]: E1205 17:33:16.865591 4961 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.17:6443: connect: connection refused" node="crc"
Dec 05 17:33:16 crc kubenswrapper[4961]: I1205 17:33:16.869345 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ed0765504ac34d52d0e00137571200117486dd258589cd7ce04088fe9037f405"}
Dec 05 17:33:16 crc kubenswrapper[4961]: I1205 17:33:16.870412 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"dcf28fe65b496c855c7d94ad72f3b18ddab5f4791dcf8b8e66f7b88b5b6dd55e"}
Dec 05 17:33:16 crc kubenswrapper[4961]: I1205 17:33:16.871859 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"3fd1088be73cc27b0cda9bae1f63b4a2859a6da22e1689afb1a8161be63f958c"}
Dec 05 17:33:16 crc kubenswrapper[4961]: I1205 17:33:16.877031 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"d5ccdc1f04bd1ec1bbe73f672d1f69f2899394f67bc6b61c01930dd207538a11"}
Dec 05 17:33:16 crc kubenswrapper[4961]: I1205 17:33:16.878902 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5365f452dcf47e5a7cfd1f880e6f7a5c8a27f796bc37ae7abba4c4ac2428315e"}
Dec 05 17:33:17 crc kubenswrapper[4961]: W1205 17:33:17.491698 4961 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.17:6443: connect: connection refused
Dec 05 17:33:17 crc kubenswrapper[4961]: E1205 17:33:17.492288 4961 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.17:6443: connect: connection refused" logger="UnhandledError"
Dec 05 17:33:17 crc kubenswrapper[4961]: I1205 17:33:17.806807 4961 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.17:6443: connect: connection refused
Dec 05 17:33:17 crc kubenswrapper[4961]: E1205 17:33:17.815526 4961 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.17:6443: connect: connection refused" interval="3.2s"
Dec 05 17:33:17 crc kubenswrapper[4961]: I1205 17:33:17.913037 4961 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="ab849273c40237ded77a1468bb637603b25a3230c38d9274aea0c415be5f268a" exitCode=0
Dec 05 17:33:17 crc kubenswrapper[4961]: I1205 17:33:17.913179 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"ab849273c40237ded77a1468bb637603b25a3230c38d9274aea0c415be5f268a"}
Dec 05 17:33:17 crc kubenswrapper[4961]: I1205 17:33:17.913537 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 17:33:17 crc kubenswrapper[4961]: I1205 17:33:17.914623 4961 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="f0e0ffa9b795f35f45f7ac6546f3bcdfa7fdf230f0965b558f527bbca4576bea" exitCode=0
Dec 05 17:33:17 crc kubenswrapper[4961]: I1205 17:33:17.914701 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"f0e0ffa9b795f35f45f7ac6546f3bcdfa7fdf230f0965b558f527bbca4576bea"}
Dec 05 17:33:17 crc kubenswrapper[4961]: I1205 17:33:17.915396 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:17 crc kubenswrapper[4961]: I1205 17:33:17.915435 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:17 crc kubenswrapper[4961]: I1205 17:33:17.915449 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:17 crc kubenswrapper[4961]: I1205 17:33:17.915941 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"386664d93709083a63520c993930de5794f10edb977c945fafa2093e4e1e451d"}
Dec 05 17:33:17 crc kubenswrapper[4961]: I1205 17:33:17.916127 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 17:33:17 crc kubenswrapper[4961]: I1205 17:33:17.920162 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:17 crc kubenswrapper[4961]: I1205 17:33:17.920204 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:17 crc kubenswrapper[4961]: I1205 17:33:17.920218 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:17 crc kubenswrapper[4961]: I1205 17:33:17.922626 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857"}
Dec 05 17:33:17 crc kubenswrapper[4961]: I1205 17:33:17.924337 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec"}
Dec 05 17:33:17 crc kubenswrapper[4961]: I1205 17:33:17.924445 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 17:33:17 crc kubenswrapper[4961]: I1205 17:33:17.925311 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:17 crc kubenswrapper[4961]: I1205 17:33:17.925338 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:17 crc kubenswrapper[4961]: I1205 17:33:17.925351 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:17 crc kubenswrapper[4961]: I1205 17:33:17.930201 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 17:33:17 crc kubenswrapper[4961]: I1205 17:33:17.931850 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:17 crc kubenswrapper[4961]: I1205 17:33:17.931992 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:17 crc kubenswrapper[4961]: I1205 17:33:17.932013 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:18 crc kubenswrapper[4961]: W1205 17:33:18.311175 4961 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.17:6443: connect: connection refused
Dec 05 17:33:18 crc kubenswrapper[4961]: E1205 17:33:18.311309 4961 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.17:6443: connect: connection refused" logger="UnhandledError"
Dec 05 17:33:18 crc kubenswrapper[4961]: I1205 17:33:18.466262 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 17:33:18 crc kubenswrapper[4961]: I1205 17:33:18.467765 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:18 crc kubenswrapper[4961]: I1205 17:33:18.467865 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:18 crc kubenswrapper[4961]: I1205 17:33:18.467891 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:18 crc kubenswrapper[4961]: I1205 17:33:18.467933 4961 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Dec 05 17:33:18 crc kubenswrapper[4961]: E1205 17:33:18.468613 4961 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.17:6443: connect: connection refused" node="crc"
Dec 05 17:33:18 crc kubenswrapper[4961]: W1205 17:33:18.709870 4961 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.17:6443: connect: connection refused
Dec 05 17:33:18 crc kubenswrapper[4961]: E1205 17:33:18.709979 4961 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.17:6443: connect: connection refused" logger="UnhandledError"
*v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.17:6443: connect: connection refused Dec 05 17:33:18 crc kubenswrapper[4961]: E1205 17:33:18.753414 4961 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.17:6443: connect: connection refused" logger="UnhandledError" Dec 05 17:33:18 crc kubenswrapper[4961]: I1205 17:33:18.807323 4961 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.17:6443: connect: connection refused Dec 05 17:33:18 crc kubenswrapper[4961]: I1205 17:33:18.930110 4961 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="386664d93709083a63520c993930de5794f10edb977c945fafa2093e4e1e451d" exitCode=0 Dec 05 17:33:18 crc kubenswrapper[4961]: I1205 17:33:18.930236 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"386664d93709083a63520c993930de5794f10edb977c945fafa2093e4e1e451d"} Dec 05 17:33:18 crc kubenswrapper[4961]: I1205 17:33:18.930284 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:18 crc kubenswrapper[4961]: I1205 17:33:18.932014 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:18 crc kubenswrapper[4961]: I1205 17:33:18.932084 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:18 crc kubenswrapper[4961]: I1205 17:33:18.932109 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:18 crc kubenswrapper[4961]: I1205 17:33:18.932833 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520"} Dec 05 17:33:18 crc kubenswrapper[4961]: I1205 17:33:18.934824 4961 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec" exitCode=0 Dec 05 17:33:18 crc kubenswrapper[4961]: I1205 17:33:18.934898 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec"} Dec 05 17:33:18 crc kubenswrapper[4961]: I1205 17:33:18.936698 4961 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="39f588f30d26682dfcc6b32545ceeac93a73f31c89269b84096f8174fcf82d8d" exitCode=0 Dec 05 17:33:18 crc kubenswrapper[4961]: I1205 17:33:18.936835 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:18 crc kubenswrapper[4961]: I1205 17:33:18.936825 4961 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"39f588f30d26682dfcc6b32545ceeac93a73f31c89269b84096f8174fcf82d8d"} Dec 05 17:33:18 crc kubenswrapper[4961]: I1205 17:33:18.936943 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:18 crc kubenswrapper[4961]: I1205 17:33:18.937979 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:18 crc kubenswrapper[4961]: I1205 17:33:18.938025 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:18 crc kubenswrapper[4961]: I1205 17:33:18.938041 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:18 crc kubenswrapper[4961]: I1205 17:33:18.938145 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:18 crc kubenswrapper[4961]: I1205 17:33:18.938181 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:18 crc kubenswrapper[4961]: I1205 17:33:18.938202 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.807264 4961 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.17:6443: connect: connection refused Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.945026 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998"} Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.945079 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0"} Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.945090 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457"} Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.946985 4961 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="0dc85e280918e1a1769b4dd56412814712d38c26fc97d201fc5af14583e786af" exitCode=0 Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.947045 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"0dc85e280918e1a1769b4dd56412814712d38c26fc97d201fc5af14583e786af"} Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.947128 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.948234 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:19 crc 
kubenswrapper[4961]: I1205 17:33:19.948273 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.948286 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.948773 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"15fe35dc4f037f2889b7f48da342dddb78d84800a2a4cc190a24c2d727e2b974"} Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.948876 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.949569 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.949624 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.949637 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.952551 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"4573e4acbc0645239c97674ae61b4f2d952fcd5b0929f51b8f952b97b1de1eb6"} Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.952582 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"51ceaa833433903bbf371fffcd7e3d0dcb3d74fe2497134dd43e18570b8e9c77"} Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.952593 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"3bbda92a962aed8a27fbb81050c0587889e326b89bca4fe276dd1754a4ee32e0"} Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.952609 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.953510 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.953572 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.953585 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.955791 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863"} Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.955843 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" 
event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f"} Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.955947 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.956689 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.956717 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:19 crc kubenswrapper[4961]: I1205 17:33:19.956729 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:20 crc kubenswrapper[4961]: I1205 17:33:20.854799 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:33:20 crc kubenswrapper[4961]: I1205 17:33:20.967761 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6f1011369e2a155e3094a890fe350acd3782cf34d36532216cd8323846a8071d"} Dec 05 17:33:20 crc kubenswrapper[4961]: I1205 17:33:20.967853 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"6d09ae003d7f522e60ef397fdc34d096cafb7c77c57f27148bfd0388462098ec"} Dec 05 17:33:20 crc kubenswrapper[4961]: I1205 17:33:20.967872 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"0f8279ee0402cf6bae70e3a4f91a50bad6705ca0dd0b79a5b5d464637eea1fef"} Dec 05 17:33:20 crc kubenswrapper[4961]: I1205 17:33:20.967884 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"bc84dc2c81a35badead962256f009cc8f6b4ebb03ed6988e7e39c933ece8dbed"} Dec 05 17:33:20 crc kubenswrapper[4961]: I1205 17:33:20.972034 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:20 crc kubenswrapper[4961]: I1205 17:33:20.972166 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04"} Dec 05 17:33:20 crc kubenswrapper[4961]: I1205 17:33:20.972241 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548"} Dec 05 17:33:20 crc kubenswrapper[4961]: I1205 17:33:20.972267 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:20 crc kubenswrapper[4961]: I1205 17:33:20.972401 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:20 crc kubenswrapper[4961]: I1205 17:33:20.972578 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 17:33:20 
crc kubenswrapper[4961]: I1205 17:33:20.973228 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:20 crc kubenswrapper[4961]: I1205 17:33:20.973260 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:20 crc kubenswrapper[4961]: I1205 17:33:20.973287 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:20 crc kubenswrapper[4961]: I1205 17:33:20.973299 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:20 crc kubenswrapper[4961]: I1205 17:33:20.973288 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:20 crc kubenswrapper[4961]: I1205 17:33:20.973402 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:20 crc kubenswrapper[4961]: I1205 17:33:20.973619 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:20 crc kubenswrapper[4961]: I1205 17:33:20.973650 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:20 crc kubenswrapper[4961]: I1205 17:33:20.973670 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:21 crc kubenswrapper[4961]: I1205 17:33:21.669087 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:21 crc kubenswrapper[4961]: I1205 17:33:21.670578 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:21 crc kubenswrapper[4961]: I1205 17:33:21.670621 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:21 crc kubenswrapper[4961]: I1205 17:33:21.670635 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:21 crc kubenswrapper[4961]: I1205 17:33:21.670668 4961 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 17:33:21 crc kubenswrapper[4961]: I1205 17:33:21.980164 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:21 crc kubenswrapper[4961]: I1205 17:33:21.980977 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:21 crc kubenswrapper[4961]: I1205 17:33:21.981357 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:21 crc kubenswrapper[4961]: I1205 17:33:21.981393 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"c95e35f37092f1c7fd01b78334f2abd169cd237355d00dbe68bd64c65538d98a"} Dec 05 17:33:21 crc kubenswrapper[4961]: I1205 17:33:21.981475 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:21 crc kubenswrapper[4961]: I1205 17:33:21.981482 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:33:21 crc kubenswrapper[4961]: I1205 17:33:21.981869 4961 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:21 crc kubenswrapper[4961]: I1205 17:33:21.981903 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:21 crc kubenswrapper[4961]: I1205 17:33:21.981918 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:21 crc kubenswrapper[4961]: I1205 17:33:21.981949 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:21 crc kubenswrapper[4961]: I1205 17:33:21.981984 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:21 crc kubenswrapper[4961]: I1205 17:33:21.982000 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:21 crc kubenswrapper[4961]: I1205 17:33:21.982287 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:21 crc kubenswrapper[4961]: I1205 17:33:21.982322 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:21 crc kubenswrapper[4961]: I1205 17:33:21.982335 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:21 crc kubenswrapper[4961]: I1205 17:33:21.982930 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:21 crc kubenswrapper[4961]: I1205 17:33:21.982950 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:21 crc kubenswrapper[4961]: I1205 17:33:21.982960 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:22 crc kubenswrapper[4961]: I1205 17:33:22.045199 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:33:22 crc kubenswrapper[4961]: I1205 17:33:22.957996 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:33:22 crc kubenswrapper[4961]: I1205 17:33:22.983197 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:22 crc kubenswrapper[4961]: I1205 17:33:22.983329 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:22 crc kubenswrapper[4961]: I1205 17:33:22.984054 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:22 crc kubenswrapper[4961]: I1205 17:33:22.984571 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:22 crc kubenswrapper[4961]: I1205 17:33:22.984644 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:22 crc kubenswrapper[4961]: I1205 17:33:22.984662 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:22 crc kubenswrapper[4961]: I1205 17:33:22.985064 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 17:33:22 crc kubenswrapper[4961]: I1205 17:33:22.985103 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:22 crc kubenswrapper[4961]: I1205 17:33:22.985121 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:22 crc kubenswrapper[4961]: I1205 17:33:22.985067 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:22 crc kubenswrapper[4961]: I1205 17:33:22.985244 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:22 crc kubenswrapper[4961]: I1205 17:33:22.985270 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:23 crc kubenswrapper[4961]: I1205 17:33:23.762037 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:33:23 crc kubenswrapper[4961]: I1205 17:33:23.855463 4961 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 17:33:23 crc kubenswrapper[4961]: I1205 17:33:23.855560 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 17:33:23 crc kubenswrapper[4961]: I1205 17:33:23.985937 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:23 crc kubenswrapper[4961]: I1205 17:33:23.985937 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:23 crc kubenswrapper[4961]: I1205 17:33:23.987898 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:23 crc kubenswrapper[4961]: I1205 17:33:23.987931 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:23 crc kubenswrapper[4961]: I1205 17:33:23.987966 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:23 crc kubenswrapper[4961]: I1205 17:33:23.987943 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:23 crc kubenswrapper[4961]: I1205 17:33:23.987983 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:23 crc kubenswrapper[4961]: I1205 17:33:23.987994 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:24 crc kubenswrapper[4961]: I1205 17:33:24.229718 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:33:24 crc kubenswrapper[4961]: I1205 17:33:24.988714 4961 kubelet_node_status.go:401] "Setting node annotation 
to enable volume controller attach/detach" Dec 05 17:33:24 crc kubenswrapper[4961]: I1205 17:33:24.989619 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:24 crc kubenswrapper[4961]: I1205 17:33:24.989660 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:24 crc kubenswrapper[4961]: I1205 17:33:24.989679 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:25 crc kubenswrapper[4961]: E1205 17:33:25.359053 4961 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Dec 05 17:33:26 crc kubenswrapper[4961]: I1205 17:33:26.262436 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Dec 05 17:33:26 crc kubenswrapper[4961]: I1205 17:33:26.262673 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:26 crc kubenswrapper[4961]: I1205 17:33:26.264227 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:26 crc kubenswrapper[4961]: I1205 17:33:26.264279 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:26 crc kubenswrapper[4961]: I1205 17:33:26.264292 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:29 crc kubenswrapper[4961]: I1205 17:33:29.166050 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:33:29 crc kubenswrapper[4961]: I1205 17:33:29.166659 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:29 crc kubenswrapper[4961]: I1205 17:33:29.167982 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:29 crc kubenswrapper[4961]: I1205 17:33:29.168016 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:29 crc kubenswrapper[4961]: I1205 17:33:29.168028 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:29 crc kubenswrapper[4961]: I1205 17:33:29.180524 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:33:29 crc kubenswrapper[4961]: I1205 17:33:29.302709 4961 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 05 17:33:29 crc kubenswrapper[4961]: I1205 17:33:29.302800 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 05 17:33:29 crc kubenswrapper[4961]: I1205 17:33:29.308574 4961 patch_prober.go:28] 
interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Dec 05 17:33:29 crc kubenswrapper[4961]: I1205 17:33:29.308639 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Dec 05 17:33:30 crc kubenswrapper[4961]: I1205 17:33:30.004523 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:30 crc kubenswrapper[4961]: I1205 17:33:30.005561 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:30 crc kubenswrapper[4961]: I1205 17:33:30.005609 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:30 crc kubenswrapper[4961]: I1205 17:33:30.005630 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:30 crc kubenswrapper[4961]: I1205 17:33:30.008743 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:33:30 crc kubenswrapper[4961]: I1205 17:33:30.359641 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Dec 05 17:33:30 crc kubenswrapper[4961]: I1205 17:33:30.359895 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:30 crc kubenswrapper[4961]: I1205 17:33:30.361052 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:30 crc kubenswrapper[4961]: I1205 17:33:30.361116 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:30 crc kubenswrapper[4961]: I1205 17:33:30.361129 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:30 crc kubenswrapper[4961]: I1205 17:33:30.408444 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Dec 05 17:33:31 crc kubenswrapper[4961]: I1205 17:33:31.007568 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:31 crc kubenswrapper[4961]: I1205 17:33:31.007626 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:31 crc kubenswrapper[4961]: I1205 17:33:31.008739 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:31 crc kubenswrapper[4961]: I1205 17:33:31.008790 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:31 crc kubenswrapper[4961]: I1205 17:33:31.008805 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:31 crc kubenswrapper[4961]: I1205 17:33:31.009114 4961 kubelet_node_status.go:724] "Recording event 
message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:31 crc kubenswrapper[4961]: I1205 17:33:31.009170 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:31 crc kubenswrapper[4961]: I1205 17:33:31.009191 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:31 crc kubenswrapper[4961]: I1205 17:33:31.020443 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Dec 05 17:33:32 crc kubenswrapper[4961]: I1205 17:33:32.010299 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:32 crc kubenswrapper[4961]: I1205 17:33:32.014220 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:32 crc kubenswrapper[4961]: I1205 17:33:32.014497 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:32 crc kubenswrapper[4961]: I1205 17:33:32.014563 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:33 crc kubenswrapper[4961]: I1205 17:33:33.767843 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:33:33 crc kubenswrapper[4961]: I1205 17:33:33.768853 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:33 crc kubenswrapper[4961]: I1205 17:33:33.769441 4961 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Dec 05 17:33:33 crc kubenswrapper[4961]: I1205 17:33:33.769512 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Dec 05 17:33:33 crc kubenswrapper[4961]: I1205 17:33:33.770481 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:33 crc kubenswrapper[4961]: I1205 17:33:33.770515 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:33 crc kubenswrapper[4961]: I1205 17:33:33.770527 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:33 crc kubenswrapper[4961]: I1205 17:33:33.773682 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:33:33 crc kubenswrapper[4961]: I1205 17:33:33.856558 4961 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Dec 05 17:33:33 crc kubenswrapper[4961]: I1205 17:33:33.856637 4961 
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.016370 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.016909 4961 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body=
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.016988 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.017987 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.018046 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.018074 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:34 crc kubenswrapper[4961]: E1205 17:33:34.296754 4961 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.299869 4961 trace.go:236] Trace[2016589140]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 17:33:24.115) (total time: 10184ms):
Dec 05 17:33:34 crc kubenswrapper[4961]: Trace[2016589140]: ---"Objects listed" error: 10184ms (17:33:34.299)
Dec 05 17:33:34 crc kubenswrapper[4961]: Trace[2016589140]: [10.184304155s] [10.184304155s] END
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.299892 4961 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.300294 4961 trace.go:236] Trace[1491768553]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 17:33:23.758) (total time: 10541ms):
Dec 05 17:33:34 crc kubenswrapper[4961]: Trace[1491768553]: ---"Objects listed" error: 10541ms (17:33:34.300)
Dec 05 17:33:34 crc kubenswrapper[4961]: Trace[1491768553]: [10.541416308s] [10.541416308s] END
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.300447 4961 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.300808 4961 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.300813 4961 trace.go:236] Trace[888164340]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (05-Dec-2025 17:33:20.706) (total time: 13593ms):
Dec 05 17:33:34 crc kubenswrapper[4961]: Trace[888164340]: ---"Objects listed" error: 13593ms (17:33:34.300)
Dec 05 17:33:34 crc kubenswrapper[4961]: Trace[888164340]: [13.593826475s] [13.593826475s] END
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.300963 4961 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.302647 4961 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Dec 05 17:33:34 crc kubenswrapper[4961]: E1205 17:33:34.303520 4961 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.669810 4961 apiserver.go:52] "Watching apiserver"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.744053 4961 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.744389 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf"]
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.744823 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.744844 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Dec 05 17:33:34 crc kubenswrapper[4961]: E1205 17:33:34.744904 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.745330 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 17:33:34 crc kubenswrapper[4961]: E1205 17:33:34.745386 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.745461 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.745526 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.745612 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 17:33:34 crc kubenswrapper[4961]: E1205 17:33:34.745762 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.750435 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.750631 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.750939 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.751500 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.752003 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.755290 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.755329 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.755395 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.755298 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.779380 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.797217 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.807422 4961 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.811483 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.833222 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.851352 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.867931 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.887200 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.900651 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.907638 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.907922 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.908008 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.908076 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.908151 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.908256 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.908346 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.908400 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.908407 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.908443 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.908625 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.908696 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.908629 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.908651 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.908766 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.908914 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.908940 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.908827 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.908967 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.908994 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909017 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909038 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909066 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909088 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909108 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909127 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909147 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909168 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909187 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.908957 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909219 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909226 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.908740 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.908870 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909095 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909180 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909206 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909378 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909440 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909404 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909462 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909483 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909493 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909507 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909531 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909552 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909572 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909594 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909613 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909634 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909629 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909654 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909662 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909675 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909694 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909713 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909730 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909752 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909769 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909769 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909808 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909827 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909846 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909863 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909879 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909887 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909897 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909916 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909936 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909955 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.909994 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910010 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910026 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910042 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910056 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910086 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910094 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910106 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910128 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910147 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910167 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910184 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910199 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910214 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910232 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910253 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910270 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910288 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910309 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910326 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910342 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910360 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910377 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910394 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910410 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for
volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910427 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910443 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910459 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910480 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910503 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910522 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910538 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910554 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910580 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 
17:33:34.910598 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910617 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910636 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910652 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911075 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911101 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911120 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911138 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911155 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911174 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" 
(UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911190 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911217 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911236 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911255 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911276 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911299 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911320 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911337 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911355 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911377 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911396 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911412 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911431 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911454 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911471 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911489 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911507 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911527 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911548 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911568 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod 
\"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911585 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911604 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911623 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911646 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911668 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911690 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911710 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911730 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911750 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911769 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod 
\"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911812 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911833 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911852 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911871 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911892 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911912 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911930 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911950 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911968 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911986 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" 
(UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912005 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912024 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912046 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912065 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912085 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912105 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912123 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912141 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912160 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912179 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod 
\"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912196 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912215 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912234 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912253 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912272 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912291 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912312 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912330 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912348 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912364 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod 
\"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912380 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912397 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912422 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912438 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912457 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912473 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912489 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912506 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912524 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912544 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: 
\"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912561 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912578 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912599 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912619 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912637 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912655 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912802 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912835 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912857 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912875 4961 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912898 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912929 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912952 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912973 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.913004 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.913035 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.913060 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.914486 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.914508 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: 
\"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.914532 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.914550 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.914578 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.914598 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.914618 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.914636 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.914655 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.914674 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.914693 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.914711 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: 
\"6509e943-70c6-444c-bc41-48a544e36fbd\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.914730 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.914749 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910107 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.910863 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911054 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911113 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911214 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911267 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.915948 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911357 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911465 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911519 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911588 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911666 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911729 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911805 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911764 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.911968 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.912005 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.913156 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.913479 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.913498 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.913538 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.913592 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). 
InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.913957 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.914035 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.914130 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.914322 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.914411 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.914889 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.915180 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.915267 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.915283 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.915321 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.915383 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.915570 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.915576 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.915736 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.915860 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). 
InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.915969 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.916272 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.916484 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.915588 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.916835 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.916859 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.916866 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.916884 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.916912 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.916941 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.916966 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.916988 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917009 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917037 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917064 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917084 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917112 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917077 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917141 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917166 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917164 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917322 4961 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917339 4961 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917351 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917365 4961 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917357 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917376 4961 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917391 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917393 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917406 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917538 4961 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917561 4961 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917584 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917602 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917620 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917637 4961 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917656 4961 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917673 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917688 4961 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917704 4961 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917720 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917739 4961 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: 
\"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917754 4961 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917812 4961 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917828 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917842 4961 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917859 4961 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917875 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917900 4961 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917918 4961 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917933 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917949 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917963 4961 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917977 4961 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.917992 4961 reconciler_common.go:293] "Volume 
detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918006 4961 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918030 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918044 4961 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918058 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918072 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918102 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918124 4961 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918146 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918171 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918186 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918202 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918217 4961 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918231 4961 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918249 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918263 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918277 4961 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918292 4961 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918306 4961 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918322 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918324 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918336 4961 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918428 4961 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918441 4961 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918452 4961 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918463 4961 reconciler_common.go:293] "Volume detached for 
volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918504 4961 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918524 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918537 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918576 4961 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918592 4961 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918612 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918623 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918092 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918116 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918567 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918516 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918731 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.918859 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.919120 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: E1205 17:33:34.919174 4961 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.919267 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: E1205 17:33:34.919290 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:33:35.419227736 +0000 UTC m=+21.480378209 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.919371 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.919422 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.919755 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.919818 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.920082 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.920119 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.920092 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.920596 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.920698 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.921097 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.921189 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.921411 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.921637 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: E1205 17:33:34.921753 4961 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:33:34 crc kubenswrapper[4961]: E1205 17:33:34.921842 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:33:35.421827875 +0000 UTC m=+21.482978348 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.922061 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.921706 4961 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.923901 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.923982 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.924181 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.924227 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.924357 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.924552 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.925034 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.925569 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.925702 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.926117 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.926357 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.926453 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.926544 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.926763 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.928298 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.928405 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.929151 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.929372 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.929542 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.930702 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.930944 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.931982 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.932535 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.932944 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.932987 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.933059 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.933208 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.933419 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.933601 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). 
InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.933738 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.933815 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.933834 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.933875 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.933978 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.934247 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.934845 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.934969 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). 
InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.934998 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: E1205 17:33:34.935367 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:33:34 crc kubenswrapper[4961]: E1205 17:33:34.935384 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:33:34 crc kubenswrapper[4961]: E1205 17:33:34.935396 4961 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.935426 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: E1205 17:33:34.935457 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 17:33:35.435435627 +0000 UTC m=+21.496586100 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.936131 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.936405 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.937181 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.937374 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.937497 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.938030 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.938089 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.938370 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.938791 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.937089 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: E1205 17:33:34.938930 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:33:34 crc kubenswrapper[4961]: E1205 17:33:34.938956 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:33:34 crc kubenswrapper[4961]: E1205 17:33:34.938971 4961 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:33:34 crc kubenswrapper[4961]: E1205 17:33:34.939017 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 17:33:35.439003128 +0000 UTC m=+21.500153611 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.939119 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.939486 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.939672 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.940851 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.940888 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.941490 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.942673 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.944990 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.945236 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.945306 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.945469 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.945650 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.945740 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.945940 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: E1205 17:33:34.945978 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:33:35.445950867 +0000 UTC m=+21.507101340 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.946243 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.946251 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.946291 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.946902 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.946929 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.947957 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.948034 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.948060 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.948231 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.948452 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.948488 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.948508 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.948588 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.948613 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.948828 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.949058 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.949123 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.949676 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.949863 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.949988 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.950176 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.950757 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.951807 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.952199 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.952265 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.952418 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.953113 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.953469 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.953901 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.953906 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.953987 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.954098 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.954493 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.955389 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.955413 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.955580 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.955925 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.956178 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.956655 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.956679 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.956900 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.957053 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.957330 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.957547 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.957558 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.959136 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.960325 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.963217 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.963658 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.966560 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.973915 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.978343 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.978732 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.988096 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:33:34 crc kubenswrapper[4961]: I1205 17:33:34.989302 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.019457 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.019515 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.019637 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.019651 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.019662 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.019673 4961 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.019703 4961 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.019716 4961 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.019726 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" 
DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.019736 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.019750 4961 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.019760 4961 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.019793 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.019806 4961 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.019870 4961 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.019885 4961 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.019901 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.019915 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.019929 4961 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.019943 4961 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.019957 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.019969 4961 reconciler_common.go:293] 
"Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.019982 4961 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.019995 4961 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020007 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020020 4961 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020022 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020033 4961 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020046 4961 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020107 4961 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020122 4961 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020249 4961 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020260 4961 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020269 4961 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc 
kubenswrapper[4961]: I1205 17:33:35.020301 4961 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020313 4961 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020323 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020515 4961 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020600 4961 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020642 4961 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020656 4961 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020667 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020679 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020692 4961 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020733 4961 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020744 4961 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020767 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020860 4961 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020873 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020882 4961 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020892 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020964 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020975 4961 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.020984 4961 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021011 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021049 4961 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021070 4961 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021138 4961 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021150 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021160 4961 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021185 4961 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021212 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021223 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021232 4961 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021242 4961 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021252 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021262 4961 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021285 4961 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021306 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021328 4961 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021336 4961 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021345 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021353 4961 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021363 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: 
\"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021396 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021406 4961 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021415 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021424 4961 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021433 4961 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021441 4961 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021450 4961 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021471 4961 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021480 4961 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021488 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021496 4961 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021520 4961 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021541 4961 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: 
\"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021550 4961 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021558 4961 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021566 4961 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021575 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021584 4961 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021605 4961 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021614 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021621 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021641 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021650 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021659 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021674 4961 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021682 4961 reconciler_common.go:293] 
"Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021691 4961 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021700 4961 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021709 4961 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021719 4961 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021727 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021736 4961 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021745 4961 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021753 4961 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021765 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021744 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021824 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.021950 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.022394 4961 reconciler_common.go:293] "Volume detached for volume 
\"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.022418 4961 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.022427 4961 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.022449 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.022460 4961 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.022468 4961 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.022477 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.022485 4961 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.022493 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.022502 4961 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.022516 4961 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.022524 4961 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.022533 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.022542 4961 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: 
\"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.022551 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.022668 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.022680 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.022692 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.022730 4961 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.022762 4961 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.022788 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.022798 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.022816 4961 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.022837 4961 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.023033 4961 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.024304 4961 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04" exitCode=255 Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.024348 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04"} Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.039510 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.039535 4961 scope.go:117] "RemoveContainer" containerID="94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.039857 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.054061 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.060240 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.070158 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.074587 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.079945 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.093109 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.104307 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:33:35 crc kubenswrapper[4961]: W1205 17:33:35.104636 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-bf5b1b0b46b509ec7f2b219d6f86db98f115b3413c9255a2287418ed96ee7907 WatchSource:0}: Error finding container bf5b1b0b46b509ec7f2b219d6f86db98f115b3413c9255a2287418ed96ee7907: Status 404 returned error can't find the container with id bf5b1b0b46b509ec7f2b219d6f86db98f115b3413c9255a2287418ed96ee7907 Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.116969 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.427091 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.427149 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:33:35 crc kubenswrapper[4961]: E1205 17:33:35.427349 4961 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:33:35 crc kubenswrapper[4961]: E1205 17:33:35.427412 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:33:36.427398035 +0000 UTC m=+22.488548508 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:33:35 crc kubenswrapper[4961]: E1205 17:33:35.427344 4961 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:33:35 crc kubenswrapper[4961]: E1205 17:33:35.427572 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:33:36.427542859 +0000 UTC m=+22.488693382 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.528475 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.528616 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:33:35 crc kubenswrapper[4961]: E1205 17:33:35.528739 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:33:36.528690201 +0000 UTC m=+22.589840834 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:33:35 crc kubenswrapper[4961]: E1205 17:33:35.528883 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:33:35 crc kubenswrapper[4961]: I1205 17:33:35.528890 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:33:35 crc kubenswrapper[4961]: E1205 17:33:35.528911 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:33:35 crc kubenswrapper[4961]: E1205 17:33:35.529005 4961 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:33:35 crc kubenswrapper[4961]: E1205 17:33:35.528977 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 
05 17:33:35 crc kubenswrapper[4961]: E1205 17:33:35.529171 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:33:35 crc kubenswrapper[4961]: E1205 17:33:35.529198 4961 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:33:35 crc kubenswrapper[4961]: E1205 17:33:35.529081 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 17:33:36.52905866 +0000 UTC m=+22.590209173 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:33:35 crc kubenswrapper[4961]: E1205 17:33:35.529297 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 17:33:36.529275875 +0000 UTC m=+22.590426548 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.029268 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f"} Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.029338 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68"} Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.029352 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"95dd3f1f96abc8d6122bd4bc91fe128a1b0cf6da2d44de78ff5e788eb2b9b402"} Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.031621 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7"} Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.031693 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"939d7d256a63c2911d2d5a6a73c835abf01de33fbb95015dbe53ab9de2fb55b1"} Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.034265 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.040310 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9"} Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.040591 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.041647 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"bf5b1b0b46b509ec7f2b219d6f86db98f115b3413c9255a2287418ed96ee7907"} Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.086730 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.143833 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05
T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.172305 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.187659 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.203236 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.221189 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.239756 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.267718 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.285519 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.300285 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.313974 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.330089 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.355056 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.373018 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for 
mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.437517 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.437593 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:33:36 crc kubenswrapper[4961]: E1205 17:33:36.437744 4961 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:33:36 crc kubenswrapper[4961]: E1205 17:33:36.437887 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:33:38.43786171 +0000 UTC m=+24.499012193 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:33:36 crc kubenswrapper[4961]: E1205 17:33:36.438381 4961 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:33:36 crc kubenswrapper[4961]: E1205 17:33:36.438446 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:33:38.438434913 +0000 UTC m=+24.499585446 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.538514 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:33:36 crc kubenswrapper[4961]: E1205 17:33:36.538622 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:33:38.538605194 +0000 UTC m=+24.599755667 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.538747 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:33:36 crc kubenswrapper[4961]: E1205 17:33:36.538896 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:33:36 crc kubenswrapper[4961]: E1205 17:33:36.538911 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:33:36 crc kubenswrapper[4961]: E1205 17:33:36.538924 4961 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:33:36 crc kubenswrapper[4961]: E1205 17:33:36.538970 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 17:33:38.538959572 +0000 UTC m=+24.600110055 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.539289 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:33:36 crc kubenswrapper[4961]: E1205 17:33:36.539402 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:33:36 crc kubenswrapper[4961]: E1205 17:33:36.539417 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:33:36 crc kubenswrapper[4961]: E1205 17:33:36.539425 4961 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:33:36 crc kubenswrapper[4961]: E1205 17:33:36.539462 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 17:33:38.539453293 +0000 UTC m=+24.600603766 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.760132 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-4vc27"] Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.760598 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.761925 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-sxfzb"] Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.762499 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.762554 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.762744 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.763008 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.764845 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.765324 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.765492 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.766102 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.766259 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.766418 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/node-resolver-pbqp7"] Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.766741 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-pbqp7" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.767710 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.769133 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5w9vd"] Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.769534 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.769867 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.770182 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.770334 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-bgtgs"] Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.770691 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.771067 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.772980 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.773172 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.773220 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.773383 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.773532 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.773582 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.773707 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.773880 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.773951 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.774066 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.782184 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.798682 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.813235 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.828167 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.841041 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.841674 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-os-release\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.841734 4961 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-host-run-k8s-cni-cncf-io\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.841764 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-hostroot\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.841807 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-multus-cni-dir\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.841833 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-run-openvswitch\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.841856 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-cni-bin\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.841894 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-host-run-netns\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.841916 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-host-run-multus-certs\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.841942 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d34d6a8d-1b83-4af1-afd3-76ba46d02e3b-cni-binary-copy\") pod \"multus-additional-cni-plugins-sxfzb\" (UID: \"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\") " pod="openshift-multus/multus-additional-cni-plugins-sxfzb" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.841978 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-node-log\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.842061 4961 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pr468\" (UniqueName: \"kubernetes.io/projected/f64daea3-7a90-4012-bd0c-31b137bd1cae-kube-api-access-pr468\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.842166 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/26618630-1782-4ae8-af12-6f913fbddf5b-multus-daemon-config\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.842196 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-slash\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.842216 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-log-socket\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.842241 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/d34d6a8d-1b83-4af1-afd3-76ba46d02e3b-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-sxfzb\" (UID: \"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\") " pod="openshift-multus/multus-additional-cni-plugins-sxfzb" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.842268 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crm8x\" (UniqueName: \"kubernetes.io/projected/c048c267-061b-479b-9d63-b3aee093d9f6-kube-api-access-crm8x\") pod \"machine-config-daemon-4vc27\" (UID: \"c048c267-061b-479b-9d63-b3aee093d9f6\") " pod="openshift-machine-config-operator/machine-config-daemon-4vc27" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.842405 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d34d6a8d-1b83-4af1-afd3-76ba46d02e3b-os-release\") pod \"multus-additional-cni-plugins-sxfzb\" (UID: \"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\") " pod="openshift-multus/multus-additional-cni-plugins-sxfzb" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.842484 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nvkk4\" (UniqueName: \"kubernetes.io/projected/d34d6a8d-1b83-4af1-afd3-76ba46d02e3b-kube-api-access-nvkk4\") pod \"multus-additional-cni-plugins-sxfzb\" (UID: \"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\") " pod="openshift-multus/multus-additional-cni-plugins-sxfzb" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.842512 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/d30454a9-4134-4c4e-a5e6-098cf956a769-hosts-file\") pod 
\"node-resolver-pbqp7\" (UID: \"d30454a9-4134-4c4e-a5e6-098cf956a769\") " pod="openshift-dns/node-resolver-pbqp7" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.842533 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-run-netns\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.842572 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-host-var-lib-kubelet\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.842595 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c048c267-061b-479b-9d63-b3aee093d9f6-proxy-tls\") pod \"machine-config-daemon-4vc27\" (UID: \"c048c267-061b-479b-9d63-b3aee093d9f6\") " pod="openshift-machine-config-operator/machine-config-daemon-4vc27" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.842615 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-etc-kubernetes\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.842672 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-run-ovn-kubernetes\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.842727 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.842763 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-system-cni-dir\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.842812 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-cnibin\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.842835 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: 
\"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-host-var-lib-cni-bin\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.842857 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d34d6a8d-1b83-4af1-afd3-76ba46d02e3b-system-cni-dir\") pod \"multus-additional-cni-plugins-sxfzb\" (UID: \"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\") " pod="openshift-multus/multus-additional-cni-plugins-sxfzb" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.842899 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-kubelet\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.842924 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-var-lib-openvswitch\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.842983 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-host-var-lib-cni-multus\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.843025 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/26618630-1782-4ae8-af12-6f913fbddf5b-cni-binary-copy\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.843057 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t82bj\" (UniqueName: \"kubernetes.io/projected/d30454a9-4134-4c4e-a5e6-098cf956a769-kube-api-access-t82bj\") pod \"node-resolver-pbqp7\" (UID: \"d30454a9-4134-4c4e-a5e6-098cf956a769\") " pod="openshift-dns/node-resolver-pbqp7" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.843078 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d34d6a8d-1b83-4af1-afd3-76ba46d02e3b-tuning-conf-dir\") pod \"multus-additional-cni-plugins-sxfzb\" (UID: \"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\") " pod="openshift-multus/multus-additional-cni-plugins-sxfzb" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.843098 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-cni-netd\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.843123 4961 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f64daea3-7a90-4012-bd0c-31b137bd1cae-ovnkube-config\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.843152 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-run-ovn\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.843176 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f64daea3-7a90-4012-bd0c-31b137bd1cae-env-overrides\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.843200 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2l2tj\" (UniqueName: \"kubernetes.io/projected/26618630-1782-4ae8-af12-6f913fbddf5b-kube-api-access-2l2tj\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.843224 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f64daea3-7a90-4012-bd0c-31b137bd1cae-ovnkube-script-lib\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.843268 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-multus-conf-dir\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.843293 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/c048c267-061b-479b-9d63-b3aee093d9f6-rootfs\") pod \"machine-config-daemon-4vc27\" (UID: \"c048c267-061b-479b-9d63-b3aee093d9f6\") " pod="openshift-machine-config-operator/machine-config-daemon-4vc27" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.843309 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f64daea3-7a90-4012-bd0c-31b137bd1cae-ovn-node-metrics-cert\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.843327 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-etc-openvswitch\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc 
kubenswrapper[4961]: I1205 17:33:36.843390 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-multus-socket-dir-parent\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.843427 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-run-systemd\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.843452 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d34d6a8d-1b83-4af1-afd3-76ba46d02e3b-cnibin\") pod \"multus-additional-cni-plugins-sxfzb\" (UID: \"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\") " pod="openshift-multus/multus-additional-cni-plugins-sxfzb" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.843474 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c048c267-061b-479b-9d63-b3aee093d9f6-mcd-auth-proxy-config\") pod \"machine-config-daemon-4vc27\" (UID: \"c048c267-061b-479b-9d63-b3aee093d9f6\") " pod="openshift-machine-config-operator/machine-config-daemon-4vc27" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.843505 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-systemd-units\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.852566 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.862884 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.862932 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:33:36 crc kubenswrapper[4961]: E1205 17:33:36.863068 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:33:36 crc kubenswrapper[4961]: E1205 17:33:36.863249 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.863534 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:33:36 crc kubenswrapper[4961]: E1205 17:33:36.863695 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.866639 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.868273 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.869527 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.871047 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.871863 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.873150 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.873853 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.874592 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.875868 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.876695 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.877906 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.878512 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.879879 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.880533 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.881262 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.882385 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.883044 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.884257 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.884824 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" 
path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.885461 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.886061 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c9
87117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.886626 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.887178 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.888444 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.889000 4961 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.890185 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.890730 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.891455 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.892747 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.893336 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.894317 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.894758 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.895593 4961 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.895694 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.897381 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.898277 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.898672 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.900124 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.900773 4961 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.901653 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.902327 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.903329 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.903775 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.904846 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.905487 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.906454 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.907013 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.906991 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for 
mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.907970 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.908451 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.909555 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.910080 4961 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.910900 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.911354 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.912226 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.912817 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.913303 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.934709 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944083 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-run-ovn-kubernetes\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944142 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944175 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-etc-kubernetes\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944215 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-system-cni-dir\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944239 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-cnibin\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944273 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-host-var-lib-cni-bin\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944302 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944303 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d34d6a8d-1b83-4af1-afd3-76ba46d02e3b-system-cni-dir\") pod \"multus-additional-cni-plugins-sxfzb\" (UID: \"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\") " pod="openshift-multus/multus-additional-cni-plugins-sxfzb" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944370 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/d34d6a8d-1b83-4af1-afd3-76ba46d02e3b-system-cni-dir\") pod \"multus-additional-cni-plugins-sxfzb\" (UID: \"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\") " pod="openshift-multus/multus-additional-cni-plugins-sxfzb" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944392 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-var-lib-openvswitch\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944419 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-kubelet\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944441 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-system-cni-dir\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944459 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-host-var-lib-cni-multus\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944476 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-cnibin\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944237 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-run-ovn-kubernetes\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944483 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/26618630-1782-4ae8-af12-6f913fbddf5b-cni-binary-copy\") pod 
\"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944534 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t82bj\" (UniqueName: \"kubernetes.io/projected/d30454a9-4134-4c4e-a5e6-098cf956a769-kube-api-access-t82bj\") pod \"node-resolver-pbqp7\" (UID: \"d30454a9-4134-4c4e-a5e6-098cf956a769\") " pod="openshift-dns/node-resolver-pbqp7" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944588 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-cni-netd\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944621 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f64daea3-7a90-4012-bd0c-31b137bd1cae-ovnkube-config\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944654 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d34d6a8d-1b83-4af1-afd3-76ba46d02e3b-tuning-conf-dir\") pod \"multus-additional-cni-plugins-sxfzb\" (UID: \"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\") " pod="openshift-multus/multus-additional-cni-plugins-sxfzb" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944677 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-run-ovn\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944726 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f64daea3-7a90-4012-bd0c-31b137bd1cae-env-overrides\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944752 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2l2tj\" (UniqueName: \"kubernetes.io/projected/26618630-1782-4ae8-af12-6f913fbddf5b-kube-api-access-2l2tj\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944791 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f64daea3-7a90-4012-bd0c-31b137bd1cae-ovnkube-script-lib\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944817 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/c048c267-061b-479b-9d63-b3aee093d9f6-rootfs\") pod \"machine-config-daemon-4vc27\" (UID: \"c048c267-061b-479b-9d63-b3aee093d9f6\") " 
pod="openshift-machine-config-operator/machine-config-daemon-4vc27" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944842 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f64daea3-7a90-4012-bd0c-31b137bd1cae-ovn-node-metrics-cert\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944866 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-multus-conf-dir\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944911 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-multus-socket-dir-parent\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944934 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-run-systemd\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944958 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-etc-openvswitch\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944982 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d34d6a8d-1b83-4af1-afd3-76ba46d02e3b-cnibin\") pod \"multus-additional-cni-plugins-sxfzb\" (UID: \"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\") " pod="openshift-multus/multus-additional-cni-plugins-sxfzb" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945008 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c048c267-061b-479b-9d63-b3aee093d9f6-mcd-auth-proxy-config\") pod \"machine-config-daemon-4vc27\" (UID: \"c048c267-061b-479b-9d63-b3aee093d9f6\") " pod="openshift-machine-config-operator/machine-config-daemon-4vc27" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945037 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-systemd-units\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945067 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-multus-cni-dir\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 
17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945088 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-os-release\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945112 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-host-run-k8s-cni-cncf-io\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945133 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/26618630-1782-4ae8-af12-6f913fbddf5b-cni-binary-copy\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945174 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-hostroot\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945176 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-host-var-lib-cni-bin\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945205 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-kubelet\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945219 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-var-lib-openvswitch\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945233 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-host-var-lib-cni-multus\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.944276 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-etc-kubernetes\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945135 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-hostroot\") pod 
\"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945285 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-host-run-netns\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945314 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-host-run-multus-certs\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945340 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d34d6a8d-1b83-4af1-afd3-76ba46d02e3b-cni-binary-copy\") pod \"multus-additional-cni-plugins-sxfzb\" (UID: \"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\") " pod="openshift-multus/multus-additional-cni-plugins-sxfzb" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945360 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-run-openvswitch\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945382 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-cni-bin\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945415 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/26618630-1782-4ae8-af12-6f913fbddf5b-multus-daemon-config\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945438 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-slash\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945457 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-node-log\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945480 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pr468\" (UniqueName: \"kubernetes.io/projected/f64daea3-7a90-4012-bd0c-31b137bd1cae-kube-api-access-pr468\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 
17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945501 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/d34d6a8d-1b83-4af1-afd3-76ba46d02e3b-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-sxfzb\" (UID: \"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\") " pod="openshift-multus/multus-additional-cni-plugins-sxfzb" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945529 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crm8x\" (UniqueName: \"kubernetes.io/projected/c048c267-061b-479b-9d63-b3aee093d9f6-kube-api-access-crm8x\") pod \"machine-config-daemon-4vc27\" (UID: \"c048c267-061b-479b-9d63-b3aee093d9f6\") " pod="openshift-machine-config-operator/machine-config-daemon-4vc27" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945548 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-log-socket\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945569 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nvkk4\" (UniqueName: \"kubernetes.io/projected/d34d6a8d-1b83-4af1-afd3-76ba46d02e3b-kube-api-access-nvkk4\") pod \"multus-additional-cni-plugins-sxfzb\" (UID: \"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\") " pod="openshift-multus/multus-additional-cni-plugins-sxfzb" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945593 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/d30454a9-4134-4c4e-a5e6-098cf956a769-hosts-file\") pod \"node-resolver-pbqp7\" (UID: \"d30454a9-4134-4c4e-a5e6-098cf956a769\") " pod="openshift-dns/node-resolver-pbqp7" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945640 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-run-netns\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945660 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d34d6a8d-1b83-4af1-afd3-76ba46d02e3b-os-release\") pod \"multus-additional-cni-plugins-sxfzb\" (UID: \"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\") " pod="openshift-multus/multus-additional-cni-plugins-sxfzb" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945683 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-host-var-lib-kubelet\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.945702 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c048c267-061b-479b-9d63-b3aee093d9f6-proxy-tls\") pod \"machine-config-daemon-4vc27\" (UID: \"c048c267-061b-479b-9d63-b3aee093d9f6\") " 
pod="openshift-machine-config-operator/machine-config-daemon-4vc27" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.946014 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-run-openvswitch\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.946093 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-multus-conf-dir\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.946151 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-multus-socket-dir-parent\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.946176 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-run-systemd\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.946181 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-cni-netd\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.946223 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/d34d6a8d-1b83-4af1-afd3-76ba46d02e3b-cnibin\") pod \"multus-additional-cni-plugins-sxfzb\" (UID: \"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\") " pod="openshift-multus/multus-additional-cni-plugins-sxfzb" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.946200 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-etc-openvswitch\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.946555 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-cni-bin\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.947119 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f64daea3-7a90-4012-bd0c-31b137bd1cae-ovnkube-config\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.947140 4961 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c048c267-061b-479b-9d63-b3aee093d9f6-mcd-auth-proxy-config\") pod \"machine-config-daemon-4vc27\" (UID: \"c048c267-061b-479b-9d63-b3aee093d9f6\") " pod="openshift-machine-config-operator/machine-config-daemon-4vc27" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.947207 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-systemd-units\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.947253 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/26618630-1782-4ae8-af12-6f913fbddf5b-multus-daemon-config\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.947278 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-multus-cni-dir\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.947318 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-slash\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.947322 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-node-log\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.947332 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-run-ovn\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.947547 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/d34d6a8d-1b83-4af1-afd3-76ba46d02e3b-tuning-conf-dir\") pod \"multus-additional-cni-plugins-sxfzb\" (UID: \"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\") " pod="openshift-multus/multus-additional-cni-plugins-sxfzb" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.947563 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-os-release\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.947603 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-host-run-k8s-cni-cncf-io\") pod \"multus-bgtgs\" (UID: 
\"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.947711 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-host-run-netns\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.947749 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-host-run-multus-certs\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.947857 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/d30454a9-4134-4c4e-a5e6-098cf956a769-hosts-file\") pod \"node-resolver-pbqp7\" (UID: \"d30454a9-4134-4c4e-a5e6-098cf956a769\") " pod="openshift-dns/node-resolver-pbqp7" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.948038 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/d34d6a8d-1b83-4af1-afd3-76ba46d02e3b-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-sxfzb\" (UID: \"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\") " pod="openshift-multus/multus-additional-cni-plugins-sxfzb" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.948058 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-log-socket\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.948092 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-run-netns\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.948151 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/d34d6a8d-1b83-4af1-afd3-76ba46d02e3b-os-release\") pod \"multus-additional-cni-plugins-sxfzb\" (UID: \"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\") " pod="openshift-multus/multus-additional-cni-plugins-sxfzb" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.948191 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/26618630-1782-4ae8-af12-6f913fbddf5b-host-var-lib-kubelet\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.948203 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f64daea3-7a90-4012-bd0c-31b137bd1cae-env-overrides\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.948230 4961 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/c048c267-061b-479b-9d63-b3aee093d9f6-rootfs\") pod \"machine-config-daemon-4vc27\" (UID: \"c048c267-061b-479b-9d63-b3aee093d9f6\") " pod="openshift-machine-config-operator/machine-config-daemon-4vc27" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.948341 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f64daea3-7a90-4012-bd0c-31b137bd1cae-ovnkube-script-lib\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.948541 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/d34d6a8d-1b83-4af1-afd3-76ba46d02e3b-cni-binary-copy\") pod \"multus-additional-cni-plugins-sxfzb\" (UID: \"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\") " pod="openshift-multus/multus-additional-cni-plugins-sxfzb" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.954412 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c048c267-061b-479b-9d63-b3aee093d9f6-proxy-tls\") pod \"machine-config-daemon-4vc27\" (UID: \"c048c267-061b-479b-9d63-b3aee093d9f6\") " pod="openshift-machine-config-operator/machine-config-daemon-4vc27" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.955411 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f64daea3-7a90-4012-bd0c-31b137bd1cae-ovn-node-metrics-cert\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.963328 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:36Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.977741 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nvkk4\" (UniqueName: \"kubernetes.io/projected/d34d6a8d-1b83-4af1-afd3-76ba46d02e3b-kube-api-access-nvkk4\") pod \"multus-additional-cni-plugins-sxfzb\" (UID: \"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\") " pod="openshift-multus/multus-additional-cni-plugins-sxfzb" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.978761 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2l2tj\" (UniqueName: \"kubernetes.io/projected/26618630-1782-4ae8-af12-6f913fbddf5b-kube-api-access-2l2tj\") pod \"multus-bgtgs\" (UID: \"26618630-1782-4ae8-af12-6f913fbddf5b\") " pod="openshift-multus/multus-bgtgs" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.978910 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crm8x\" (UniqueName: \"kubernetes.io/projected/c048c267-061b-479b-9d63-b3aee093d9f6-kube-api-access-crm8x\") pod \"machine-config-daemon-4vc27\" (UID: \"c048c267-061b-479b-9d63-b3aee093d9f6\") " pod="openshift-machine-config-operator/machine-config-daemon-4vc27" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.979172 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t82bj\" (UniqueName: 
\"kubernetes.io/projected/d30454a9-4134-4c4e-a5e6-098cf956a769-kube-api-access-t82bj\") pod \"node-resolver-pbqp7\" (UID: \"d30454a9-4134-4c4e-a5e6-098cf956a769\") " pod="openshift-dns/node-resolver-pbqp7" Dec 05 17:33:36 crc kubenswrapper[4961]: I1205 17:33:36.989758 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pr468\" (UniqueName: \"kubernetes.io/projected/f64daea3-7a90-4012-bd0c-31b137bd1cae-kube-api-access-pr468\") pod \"ovnkube-node-5w9vd\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:37 crc kubenswrapper[4961]: I1205 17:33:37.013637 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:37Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:37 crc kubenswrapper[4961]: I1205 17:33:37.031604 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:37Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:37 crc kubenswrapper[4961]: I1205 17:33:37.051224 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:37Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:37 crc kubenswrapper[4961]: I1205 17:33:37.068121 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:37Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:37 crc kubenswrapper[4961]: I1205 17:33:37.082947 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" Dec 05 17:33:37 crc kubenswrapper[4961]: I1205 17:33:37.089841 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\
\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8cc
f4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:37Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:37 crc kubenswrapper[4961]: I1205 17:33:37.090863 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" Dec 05 17:33:37 crc kubenswrapper[4961]: W1205 17:33:37.096066 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc048c267_061b_479b_9d63_b3aee093d9f6.slice/crio-0c480a32e024de794ddec1e49987a9f4a020de4adaf0eb41e2ac1261762fef6f WatchSource:0}: Error finding container 0c480a32e024de794ddec1e49987a9f4a020de4adaf0eb41e2ac1261762fef6f: Status 404 returned error can't find the container with id 0c480a32e024de794ddec1e49987a9f4a020de4adaf0eb41e2ac1261762fef6f Dec 05 17:33:37 crc kubenswrapper[4961]: I1205 17:33:37.100728 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-pbqp7" Dec 05 17:33:37 crc kubenswrapper[4961]: W1205 17:33:37.104113 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd34d6a8d_1b83_4af1_afd3_76ba46d02e3b.slice/crio-f624f9bd4d5919b27b55bc2b92d2eec579cd396eaa30b6f59c7ec1a7875990d3 WatchSource:0}: Error finding container f624f9bd4d5919b27b55bc2b92d2eec579cd396eaa30b6f59c7ec1a7875990d3: Status 404 returned error can't find the container with id f624f9bd4d5919b27b55bc2b92d2eec579cd396eaa30b6f59c7ec1a7875990d3 Dec 05 17:33:37 crc kubenswrapper[4961]: I1205 17:33:37.107933 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:37 crc kubenswrapper[4961]: I1205 17:33:37.110485 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\
"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:37Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:37 crc kubenswrapper[4961]: I1205 17:33:37.112120 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-bgtgs" Dec 05 17:33:37 crc kubenswrapper[4961]: I1205 17:33:37.127766 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:37Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:37 crc kubenswrapper[4961]: W1205 17:33:37.137156 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf64daea3_7a90_4012_bd0c_31b137bd1cae.slice/crio-97c7c8cdb21ee1591d4a4b7780f671fae8cf19a9f096255dcc67f67ca9f56e1b WatchSource:0}: Error finding container 97c7c8cdb21ee1591d4a4b7780f671fae8cf19a9f096255dcc67f67ca9f56e1b: Status 404 returned error can't find the container with id 97c7c8cdb21ee1591d4a4b7780f671fae8cf19a9f096255dcc67f67ca9f56e1b Dec 05 17:33:37 crc kubenswrapper[4961]: I1205 17:33:37.189111 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:37Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:37 crc kubenswrapper[4961]: W1205 17:33:37.190840 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod26618630_1782_4ae8_af12_6f913fbddf5b.slice/crio-e320055dd50a9c46d857d601ff90f805e2a9f9e1d0db0b434e2bb307ca1bfba1 WatchSource:0}: Error finding container e320055dd50a9c46d857d601ff90f805e2a9f9e1d0db0b434e2bb307ca1bfba1: Status 404 returned error can't find the container with id e320055dd50a9c46d857d601ff90f805e2a9f9e1d0db0b434e2bb307ca1bfba1 Dec 05 17:33:37 crc kubenswrapper[4961]: I1205 17:33:37.206056 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:37Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.049304 4961 generic.go:334] "Generic (PLEG): container finished" podID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerID="b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca" exitCode=0 Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.049391 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerDied","Data":"b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca"} Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.049829 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerStarted","Data":"97c7c8cdb21ee1591d4a4b7780f671fae8cf19a9f096255dcc67f67ca9f56e1b"} Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.052076 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-pbqp7" event={"ID":"d30454a9-4134-4c4e-a5e6-098cf956a769","Type":"ContainerStarted","Data":"52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9"} Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.052140 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-pbqp7" event={"ID":"d30454a9-4134-4c4e-a5e6-098cf956a769","Type":"ContainerStarted","Data":"4cb5ca1744ea353647f258a77e12c5fd1a0bb77f400ae85d3c055d3dd87e0147"} Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.054601 4961 generic.go:334] "Generic (PLEG): container finished" podID="d34d6a8d-1b83-4af1-afd3-76ba46d02e3b" containerID="9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822" exitCode=0 Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.054691 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" event={"ID":"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b","Type":"ContainerDied","Data":"9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822"} Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 
17:33:38.054743 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" event={"ID":"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b","Type":"ContainerStarted","Data":"f624f9bd4d5919b27b55bc2b92d2eec579cd396eaa30b6f59c7ec1a7875990d3"} Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.057550 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerStarted","Data":"44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d"} Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.057640 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerStarted","Data":"384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8"} Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.057656 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerStarted","Data":"0c480a32e024de794ddec1e49987a9f4a020de4adaf0eb41e2ac1261762fef6f"} Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.059257 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea"} Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.065408 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bgtgs" event={"ID":"26618630-1782-4ae8-af12-6f913fbddf5b","Type":"ContainerStarted","Data":"99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4"} Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.065633 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bgtgs" event={"ID":"26618630-1782-4ae8-af12-6f913fbddf5b","Type":"ContainerStarted","Data":"e320055dd50a9c46d857d601ff90f805e2a9f9e1d0db0b434e2bb307ca1bfba1"} Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.075268 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.096833 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.127036 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.142042 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.163086 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.175340 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.190890 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.210327 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for 
mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.229906 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.253797 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:38Z 
is after 2025-08-24T17:21:41Z" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.267452 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.282016 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.297519 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for 
mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.310913 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.329046 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.341215 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.362891 4961 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.377438 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.392769 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.406251 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.421026 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c85
7df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.435520 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"
host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.450282 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.462228 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T17:33:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.464516 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.464601 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:33:38 crc kubenswrapper[4961]: E1205 17:33:38.464667 4961 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:33:38 crc kubenswrapper[4961]: E1205 17:33:38.464726 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:33:42.464712625 +0000 UTC m=+28.525863098 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:33:38 crc kubenswrapper[4961]: E1205 17:33:38.464667 4961 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:33:38 crc kubenswrapper[4961]: E1205 17:33:38.464767 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:33:42.464759156 +0000 UTC m=+28.525909629 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.565607 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.565737 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.565814 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:33:38 crc kubenswrapper[4961]: E1205 17:33:38.565911 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:33:42.56589078 +0000 UTC m=+28.627041253 (durationBeforeRetry 4s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:33:38 crc kubenswrapper[4961]: E1205 17:33:38.565976 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:33:38 crc kubenswrapper[4961]: E1205 17:33:38.566008 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:33:38 crc kubenswrapper[4961]: E1205 17:33:38.566036 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:33:38 crc kubenswrapper[4961]: E1205 17:33:38.566062 4961 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:33:38 crc kubenswrapper[4961]: E1205 17:33:38.566138 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 17:33:42.566116025 +0000 UTC m=+28.627266558 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:33:38 crc kubenswrapper[4961]: E1205 17:33:38.566017 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:33:38 crc kubenswrapper[4961]: E1205 17:33:38.566365 4961 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:33:38 crc kubenswrapper[4961]: E1205 17:33:38.567129 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 17:33:42.567100206 +0000 UTC m=+28.628250749 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.863600 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.863691 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:33:38 crc kubenswrapper[4961]: I1205 17:33:38.863708 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:33:38 crc kubenswrapper[4961]: E1205 17:33:38.863865 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:33:38 crc kubenswrapper[4961]: E1205 17:33:38.863981 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:33:38 crc kubenswrapper[4961]: E1205 17:33:38.864062 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:33:39 crc kubenswrapper[4961]: I1205 17:33:39.073649 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerStarted","Data":"32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8"} Dec 05 17:33:39 crc kubenswrapper[4961]: I1205 17:33:39.077786 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" event={"ID":"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b","Type":"ContainerStarted","Data":"512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b"} Dec 05 17:33:39 crc kubenswrapper[4961]: I1205 17:33:39.102477 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":
\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:39 crc kubenswrapper[4961]: I1205 17:33:39.126373 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:39 crc kubenswrapper[4961]: I1205 17:33:39.146599 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\
\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\"
:\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:39 crc kubenswrapper[4961]: I1205 17:33:39.163459 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\
"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:39 crc kubenswrapper[4961]: I1205 17:33:39.178050 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:39 crc kubenswrapper[4961]: I1205 17:33:39.192376 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:39 crc kubenswrapper[4961]: I1205 17:33:39.206202 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T17:33:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:39 crc kubenswrapper[4961]: I1205 17:33:39.231872 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":tr
ue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:39 crc kubenswrapper[4961]: I1205 17:33:39.247202 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:39 crc kubenswrapper[4961]: I1205 17:33:39.268465 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:39 crc kubenswrapper[4961]: I1205 17:33:39.291072 4961 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:39 crc kubenswrapper[4961]: I1205 17:33:39.311572 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.085077 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerStarted","Data":"2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765"} Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.085586 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerStarted","Data":"233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26"} Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.085606 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerStarted","Data":"531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd"} Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.085618 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerStarted","Data":"4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93"} Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.085630 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerStarted","Data":"9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415"} Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.086746 4961 generic.go:334] "Generic (PLEG): container finished" podID="d34d6a8d-1b83-4af1-afd3-76ba46d02e3b" containerID="512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b" exitCode=0 Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.086799 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" event={"ID":"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b","Type":"ContainerDied","Data":"512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b"} Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.105284 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.121946 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.138506 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\
\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods 
\\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.154344 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.175831 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z 
is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.191037 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.200949 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-qc49n"] Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.201352 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-qc49n" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.203562 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.203788 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.204180 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.204242 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.206548 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"
volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.220525 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.239224 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"moun
tPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.255233 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.270334 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.282685 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cscxd\" (UniqueName: \"kubernetes.io/projected/90ca8c0d-e913-462b-9366-b6609477abb1-kube-api-access-cscxd\") pod \"node-ca-qc49n\" (UID: \"90ca8c0d-e913-462b-9366-b6609477abb1\") " pod="openshift-image-registry/node-ca-qc49n" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.283009 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/90ca8c0d-e913-462b-9366-b6609477abb1-serviceca\") pod \"node-ca-qc49n\" (UID: \"90ca8c0d-e913-462b-9366-b6609477abb1\") " pod="openshift-image-registry/node-ca-qc49n" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.283284 4961 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/90ca8c0d-e913-462b-9366-b6609477abb1-host\") pod \"node-ca-qc49n\" (UID: \"90ca8c0d-e913-462b-9366-b6609477abb1\") " pod="openshift-image-registry/node-ca-qc49n" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.286755 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.305567 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.319248 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.335469 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for 
mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.348984 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.364288 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.379758 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.384547 4961 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/90ca8c0d-e913-462b-9366-b6609477abb1-serviceca\") pod \"node-ca-qc49n\" (UID: \"90ca8c0d-e913-462b-9366-b6609477abb1\") " pod="openshift-image-registry/node-ca-qc49n" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.384667 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/90ca8c0d-e913-462b-9366-b6609477abb1-host\") pod \"node-ca-qc49n\" (UID: \"90ca8c0d-e913-462b-9366-b6609477abb1\") " pod="openshift-image-registry/node-ca-qc49n" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.384703 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cscxd\" (UniqueName: \"kubernetes.io/projected/90ca8c0d-e913-462b-9366-b6609477abb1-kube-api-access-cscxd\") pod \"node-ca-qc49n\" (UID: \"90ca8c0d-e913-462b-9366-b6609477abb1\") " pod="openshift-image-registry/node-ca-qc49n" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.384884 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/90ca8c0d-e913-462b-9366-b6609477abb1-host\") pod \"node-ca-qc49n\" (UID: \"90ca8c0d-e913-462b-9366-b6609477abb1\") " pod="openshift-image-registry/node-ca-qc49n" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.385621 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/90ca8c0d-e913-462b-9366-b6609477abb1-serviceca\") pod \"node-ca-qc49n\" (UID: \"90ca8c0d-e913-462b-9366-b6609477abb1\") " pod="openshift-image-registry/node-ca-qc49n" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.401559 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z 
is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.406672 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cscxd\" (UniqueName: \"kubernetes.io/projected/90ca8c0d-e913-462b-9366-b6609477abb1-kube-api-access-cscxd\") pod \"node-ca-qc49n\" (UID: \"90ca8c0d-e913-462b-9366-b6609477abb1\") " pod="openshift-image-registry/node-ca-qc49n" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.416807 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.430256 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.444947 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.463399 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"moun
tPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.482824 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.495041 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.520313 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-qc49n" Dec 05 17:33:40 crc kubenswrapper[4961]: W1205 17:33:40.535589 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod90ca8c0d_e913_462b_9366_b6609477abb1.slice/crio-d7f6f08385f05f9ddd006e58d1af2b7198cecf91ca7f4799f51bacba70a51f5c WatchSource:0}: Error finding container d7f6f08385f05f9ddd006e58d1af2b7198cecf91ca7f4799f51bacba70a51f5c: Status 404 returned error can't find the container with id d7f6f08385f05f9ddd006e58d1af2b7198cecf91ca7f4799f51bacba70a51f5c Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.703605 4961 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.706407 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.706457 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.706468 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.706652 4961 kubelet_node_status.go:76] "Attempting to register node" node="crc" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.715753 4961 kubelet_node_status.go:115] "Node was previously registered" node="crc" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.716182 4961 kubelet_node_status.go:79] "Successfully registered node" node="crc" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.718469 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.718502 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.718513 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.718533 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.718550 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:40Z","lastTransitionTime":"2025-12-05T17:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:40 crc kubenswrapper[4961]: E1205 17:33:40.739856 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.744486 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.744521 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.744532 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.744552 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.744564 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:40Z","lastTransitionTime":"2025-12-05T17:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:40 crc kubenswrapper[4961]: E1205 17:33:40.758442 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.762267 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.762309 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.762319 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.762339 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.762352 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:40Z","lastTransitionTime":"2025-12-05T17:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:40 crc kubenswrapper[4961]: E1205 17:33:40.775405 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.779476 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.779512 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.779525 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.779542 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.779553 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:40Z","lastTransitionTime":"2025-12-05T17:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:40 crc kubenswrapper[4961]: E1205 17:33:40.795428 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.799600 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.799643 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.799654 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.799673 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.799684 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:40Z","lastTransitionTime":"2025-12-05T17:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:40 crc kubenswrapper[4961]: E1205 17:33:40.812361 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: E1205 17:33:40.812490 4961 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.814163 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.814191 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.814200 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.814215 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.814225 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:40Z","lastTransitionTime":"2025-12-05T17:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.859217 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.862445 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.862463 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:33:40 crc kubenswrapper[4961]: E1205 17:33:40.862561 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.862864 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:33:40 crc kubenswrapper[4961]: E1205 17:33:40.862959 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:33:40 crc kubenswrapper[4961]: E1205 17:33:40.863014 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.873031 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.877322 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.881949 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f
0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.896453 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.913826 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.916459 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.916505 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.916522 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.916545 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.916559 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:40Z","lastTransitionTime":"2025-12-05T17:33:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.927711 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.947478 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"n
ame\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIP
s\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.961173 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.975817 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:40 crc kubenswrapper[4961]: I1205 17:33:40.991706 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:40Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.010941 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"moun
tPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.018368 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.018413 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.018422 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.018439 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.018450 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:41Z","lastTransitionTime":"2025-12-05T17:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.028265 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.039170 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.050931 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.064307 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.080096 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.091559 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-qc49n" event={"ID":"90ca8c0d-e913-462b-9366-b6609477abb1","Type":"ContainerStarted","Data":"1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2"} Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.091636 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-qc49n" event={"ID":"90ca8c0d-e913-462b-9366-b6609477abb1","Type":"ContainerStarted","Data":"d7f6f08385f05f9ddd006e58d1af2b7198cecf91ca7f4799f51bacba70a51f5c"} Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.093277 4961 generic.go:334] "Generic (PLEG): container finished" podID="d34d6a8d-1b83-4af1-afd3-76ba46d02e3b" containerID="4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604" exitCode=0 Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.093341 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" event={"ID":"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b","Type":"ContainerDied","Data":"4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604"} Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.095971 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: E1205 17:33:41.101109 4961 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.112498 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.122807 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.122847 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.122857 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.122873 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.122883 4961 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:41Z","lastTransitionTime":"2025-12-05T17:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.128654 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.145703 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.160407 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.173919 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.192608 4961 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.206240 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.220482 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.225162 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.225199 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.225212 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.225229 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.225240 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:41Z","lastTransitionTime":"2025-12-05T17:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.234085 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.250516 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/r
un/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.264249 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\
"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.277201 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.290917 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.305642 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serv
iceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.322154 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mo
untPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.327904 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.327942 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.327952 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.327969 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.327979 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:41Z","lastTransitionTime":"2025-12-05T17:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.334265 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.346516 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.359179 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.374016 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.385128 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.399213 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for 
mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.412543 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.430700 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.430747 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.430759 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.430798 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.430811 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:41Z","lastTransitionTime":"2025-12-05T17:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.436493 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6
166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.450379 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.463785 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.475502 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:41Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.532621 4961 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.532676 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.532689 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.532710 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.532723 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:41Z","lastTransitionTime":"2025-12-05T17:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.635653 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.635697 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.635710 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.635728 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.635750 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:41Z","lastTransitionTime":"2025-12-05T17:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.737623 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.737655 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.737664 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.737679 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.737689 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:41Z","lastTransitionTime":"2025-12-05T17:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.840492 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.840539 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.840549 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.840574 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.840587 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:41Z","lastTransitionTime":"2025-12-05T17:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.943246 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.943283 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.943295 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.943312 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:41 crc kubenswrapper[4961]: I1205 17:33:41.943324 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:41Z","lastTransitionTime":"2025-12-05T17:33:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.046238 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.046290 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.046304 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.046322 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.046333 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:42Z","lastTransitionTime":"2025-12-05T17:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.102262 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerStarted","Data":"072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e"} Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.105661 4961 generic.go:334] "Generic (PLEG): container finished" podID="d34d6a8d-1b83-4af1-afd3-76ba46d02e3b" containerID="df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4" exitCode=0 Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.105827 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" event={"ID":"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b","Type":"ContainerDied","Data":"df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4"} Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.122515 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:42Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.137627 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:42Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.150643 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.150672 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.150681 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.150698 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.150707 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:42Z","lastTransitionTime":"2025-12-05T17:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.157629 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:42Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.171811 4961 
status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:42Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.188041 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOn
ly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-releas
e\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:42Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.199886 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mount
Path\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:42Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.213522 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:42Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.226815 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T17:33:42Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.243556 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":tr
ue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:42Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.253847 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.253886 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.253897 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.253914 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.253925 4961 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:42Z","lastTransitionTime":"2025-12-05T17:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.259544 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:42Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.273738 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:42Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.288288 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:42Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.301050 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:42Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.347173 4961 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",
\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47e
f0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17
b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:42Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.360312 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.360574 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.360656 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.360798 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.360889 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:42Z","lastTransitionTime":"2025-12-05T17:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.465006 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.465789 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.465856 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.465935 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.466106 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:42Z","lastTransitionTime":"2025-12-05T17:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.506588 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.506905 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:33:42 crc kubenswrapper[4961]: E1205 17:33:42.506728 4961 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:33:42 crc kubenswrapper[4961]: E1205 17:33:42.507106 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:33:50.507092397 +0000 UTC m=+36.568242860 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:33:42 crc kubenswrapper[4961]: E1205 17:33:42.507046 4961 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:33:42 crc kubenswrapper[4961]: E1205 17:33:42.507268 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:33:50.507260962 +0000 UTC m=+36.568411435 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.569065 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.569112 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.569123 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.569140 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.569151 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:42Z","lastTransitionTime":"2025-12-05T17:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.608032 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:33:42 crc kubenswrapper[4961]: E1205 17:33:42.608194 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:33:50.608164249 +0000 UTC m=+36.669314722 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.608259 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.608305 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:33:42 crc kubenswrapper[4961]: E1205 17:33:42.608498 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:33:42 crc kubenswrapper[4961]: E1205 17:33:42.608516 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:33:42 crc kubenswrapper[4961]: E1205 17:33:42.608530 4961 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:33:42 crc kubenswrapper[4961]: E1205 17:33:42.608559 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:33:42 crc kubenswrapper[4961]: E1205 17:33:42.608608 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:33:42 crc kubenswrapper[4961]: E1205 17:33:42.608624 4961 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:33:42 crc kubenswrapper[4961]: E1205 17:33:42.608587 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 17:33:50.608573138 +0000 UTC m=+36.669723611 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:33:42 crc kubenswrapper[4961]: E1205 17:33:42.608705 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 17:33:50.60868518 +0000 UTC m=+36.669835653 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.671658 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.671721 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.671738 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.671796 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.671820 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:42Z","lastTransitionTime":"2025-12-05T17:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.774660 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.774724 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.774742 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.774765 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.774814 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:42Z","lastTransitionTime":"2025-12-05T17:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.863157 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:33:42 crc kubenswrapper[4961]: E1205 17:33:42.863326 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.863411 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:33:42 crc kubenswrapper[4961]: E1205 17:33:42.863565 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.863642 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:33:42 crc kubenswrapper[4961]: E1205 17:33:42.864095 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.877363 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.877399 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.877408 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.877423 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.877432 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:42Z","lastTransitionTime":"2025-12-05T17:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.980231 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.980283 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.980297 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.980317 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:42 crc kubenswrapper[4961]: I1205 17:33:42.980331 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:42Z","lastTransitionTime":"2025-12-05T17:33:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.083209 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.083256 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.083267 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.083283 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.083295 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:43Z","lastTransitionTime":"2025-12-05T17:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.117570 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" event={"ID":"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b","Type":"ContainerStarted","Data":"e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce"} Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.136294 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.153012 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy 
whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\
"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9
8100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.168317 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.181347 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.185835 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.185884 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.185902 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:43 crc 
kubenswrapper[4961]: I1205 17:33:43.185928 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.185945 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:43Z","lastTransitionTime":"2025-12-05T17:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.198675 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.216202 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.232568 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.247603 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T17:33:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.266454 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":tr
ue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.285122 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.288324 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.288510 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.288548 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.288580 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.288600 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:43Z","lastTransitionTime":"2025-12-05T17:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.311853 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6
166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.328251 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/
openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.342601 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.353921 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:43Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.391547 4961 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.391621 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.391634 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.391652 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.391665 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:43Z","lastTransitionTime":"2025-12-05T17:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.494753 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.494823 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.494837 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.494858 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.494885 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:43Z","lastTransitionTime":"2025-12-05T17:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.597706 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.597755 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.597768 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.597803 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.597818 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:43Z","lastTransitionTime":"2025-12-05T17:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.700578 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.700626 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.700640 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.700662 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.700675 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:43Z","lastTransitionTime":"2025-12-05T17:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.804178 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.804557 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.804570 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.804587 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.804599 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:43Z","lastTransitionTime":"2025-12-05T17:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.911673 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.911715 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.911724 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.911743 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:43 crc kubenswrapper[4961]: I1205 17:33:43.911755 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:43Z","lastTransitionTime":"2025-12-05T17:33:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.013855 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.013900 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.013913 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.013929 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.013943 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:44Z","lastTransitionTime":"2025-12-05T17:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.116843 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.116895 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.116906 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.116922 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.116935 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:44Z","lastTransitionTime":"2025-12-05T17:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.126788 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerStarted","Data":"be74575c03a8e9581461d25726af09dc544f4f4cdeea4651fa96677a0b17aa2f"} Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.127140 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.132564 4961 generic.go:334] "Generic (PLEG): container finished" podID="d34d6a8d-1b83-4af1-afd3-76ba46d02e3b" containerID="e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce" exitCode=0 Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.132612 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" event={"ID":"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b","Type":"ContainerDied","Data":"e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce"} Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.144344 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.151164 4961 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.161495 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.182718 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountP
ath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae68
08371d539e18b8aab4ee8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be74575c03a8e9581461d25726af09dc544f4f4cdeea4651fa96677a0b17aa2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"
etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.201663 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller
-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.218164 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.220767 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.220841 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.220854 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.220878 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.220892 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:44Z","lastTransitionTime":"2025-12-05T17:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.235190 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.250755 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.269675 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly
\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.284612 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"na
me\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.295345 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.309891 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.323907 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.323948 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.323960 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.323976 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.323989 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:44Z","lastTransitionTime":"2025-12-05T17:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.326555 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.341626 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.353998 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.367822 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.382183 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.397487 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.415035 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mo
untPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.426293 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.426328 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.426337 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.426353 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeNotReady" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.426363 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:44Z","lastTransitionTime":"2025-12-05T17:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.431140 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\
\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.445727 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: 
failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.460446 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.472348 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.496547 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for 
mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.510525 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.524234 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.528808 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.528851 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.528860 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.528879 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.528889 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:44Z","lastTransitionTime":"2025-12-05T17:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.543688 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.558840 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.578613 4961 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-de
v/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastSta
te\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be74575c03a8e9581461d25726af09dc544f4f4cdeea4651fa96677a0b17aa2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/ser
viceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.631560 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.631622 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.631641 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:44 crc 
kubenswrapper[4961]: I1205 17:33:44.631668 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.631686 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:44Z","lastTransitionTime":"2025-12-05T17:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.740362 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.740421 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.740432 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.740451 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.740462 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:44Z","lastTransitionTime":"2025-12-05T17:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.842491 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.842551 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.842566 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.842580 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.842590 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:44Z","lastTransitionTime":"2025-12-05T17:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.865044 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.865076 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:33:44 crc kubenswrapper[4961]: E1205 17:33:44.865204 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.865320 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:33:44 crc kubenswrapper[4961]: E1205 17:33:44.865550 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:33:44 crc kubenswrapper[4961]: E1205 17:33:44.865633 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.880027 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.897149 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.911539 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.936530 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"re
cursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cr
i-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be74575c03a8e9581461d25726af09dc544f4f4cdeea4651fa96677a0b17aa2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"
mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.946382 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.946416 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.946426 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.946442 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.946453 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:44Z","lastTransitionTime":"2025-12-05T17:33:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.955809 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.969125 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\
":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:44 crc kubenswrapper[4961]: I1205 17:33:44.989446 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.003726 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.017828 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.032700 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mo
untPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.049057 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.049311 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.049361 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.049377 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.049399 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.049415 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:45Z","lastTransitionTime":"2025-12-05T17:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.063969 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.084993 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.102920 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.146109 4961 generic.go:334] "Generic (PLEG): container finished" podID="d34d6a8d-1b83-4af1-afd3-76ba46d02e3b" containerID="f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34" exitCode=0 Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.146721 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" event={"ID":"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b","Type":"ContainerDied","Data":"f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34"} Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.146801 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.147342 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.152331 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.152378 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.152393 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.152414 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.152433 4961 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:45Z","lastTransitionTime":"2025-12-05T17:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.160409 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.181179 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\
\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name
\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be74575c03a8e9581461d25726af09dc544f4f4cdeea4651fa96677a0b17aa2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\
"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.183674 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.205803 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.223450 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.239323 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.255819 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.256819 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.256864 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.256874 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.256892 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.256902 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:45Z","lastTransitionTime":"2025-12-05T17:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.271605 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bad
e60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\
\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.290534 4961 status_manager.go:875] "Failed to update status for 
pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.305741 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.328354 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.350485 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.359749 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.360094 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.360184 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.360333 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.360430 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:45Z","lastTransitionTime":"2025-12-05T17:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.367329 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.389341 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.411404 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.425743 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for 
mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.443026 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.456979 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.462863 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.462912 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.462927 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.462947 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.462959 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:45Z","lastTransitionTime":"2025-12-05T17:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.481126 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.496024 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.518339 4961 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be74575c03a8e9581461d25726af09dc544f4f4cdeea4651fa96677a0b17aa2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount
\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.539838 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.565920 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.565941 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.565984 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.566003 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.566034 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.566050 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:45Z","lastTransitionTime":"2025-12-05T17:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.582722 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.598021 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.613240 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.628211 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.638887 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.649196 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.669082 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.669122 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.669131 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.669146 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.669156 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:45Z","lastTransitionTime":"2025-12-05T17:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.771917 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.771966 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.771975 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.771995 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.772007 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:45Z","lastTransitionTime":"2025-12-05T17:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.875334 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.875381 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.875397 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.875415 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.875427 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:45Z","lastTransitionTime":"2025-12-05T17:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.978554 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.978592 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.978602 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.978621 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:45 crc kubenswrapper[4961]: I1205 17:33:45.978633 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:45Z","lastTransitionTime":"2025-12-05T17:33:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.081196 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.081269 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.081281 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.081301 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.081315 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:46Z","lastTransitionTime":"2025-12-05T17:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.155952 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" event={"ID":"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b","Type":"ContainerStarted","Data":"f2f0ebc0d917781b93e3848439cdfeba56b89f9716773c0dbd68ca5aeb866bf2"} Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.179658 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.183666 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.183745 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.183756 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.183796 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.183808 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:46Z","lastTransitionTime":"2025-12-05T17:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.195630 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.219511 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.238597 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.259515 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.278684 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.286130 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.286197 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.286214 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.286240 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.286258 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:46Z","lastTransitionTime":"2025-12-05T17:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.299065 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec
07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.320583 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be74575c03a8e9581461d25726af09dc544f4f4c
deea4651fa96677a0b17aa2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.331621 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.349962 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.366722 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.378630 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.388699 4961 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.388747 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.388761 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.389089 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.389137 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:46Z","lastTransitionTime":"2025-12-05T17:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.393849 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2f0ebc0d917781b93e3848439cdfeba56b89f9716773c0dbd68ca5aeb866bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e4079209
6b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\
"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12
-05T17:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.406551 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"
mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:46Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.494224 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.494293 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.494306 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.494324 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.494339 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:46Z","lastTransitionTime":"2025-12-05T17:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.597189 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.597253 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.597263 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.597280 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.597294 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:46Z","lastTransitionTime":"2025-12-05T17:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.700661 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.700712 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.700725 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.700748 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.700760 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:46Z","lastTransitionTime":"2025-12-05T17:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.803467 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.803539 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.803600 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.803639 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.803654 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:46Z","lastTransitionTime":"2025-12-05T17:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.862974 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:33:46 crc kubenswrapper[4961]: E1205 17:33:46.863135 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.862977 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.863236 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:33:46 crc kubenswrapper[4961]: E1205 17:33:46.863326 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:33:46 crc kubenswrapper[4961]: E1205 17:33:46.863500 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.906753 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.906842 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.906854 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.906879 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:46 crc kubenswrapper[4961]: I1205 17:33:46.906893 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:46Z","lastTransitionTime":"2025-12-05T17:33:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.010254 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.010297 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.010308 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.010326 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.010340 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:47Z","lastTransitionTime":"2025-12-05T17:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.112939 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.112988 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.112999 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.113015 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.113026 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:47Z","lastTransitionTime":"2025-12-05T17:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.160522 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5w9vd_f64daea3-7a90-4012-bd0c-31b137bd1cae/ovnkube-controller/0.log" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.163977 4961 generic.go:334] "Generic (PLEG): container finished" podID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerID="be74575c03a8e9581461d25726af09dc544f4f4cdeea4651fa96677a0b17aa2f" exitCode=1 Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.164029 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerDied","Data":"be74575c03a8e9581461d25726af09dc544f4f4cdeea4651fa96677a0b17aa2f"} Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.164815 4961 scope.go:117] "RemoveContainer" containerID="be74575c03a8e9581461d25726af09dc544f4f4cdeea4651fa96677a0b17aa2f" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.181376 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.197444 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.216383 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.216446 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.216464 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.216483 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.216494 4961 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:47Z","lastTransitionTime":"2025-12-05T17:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.222242 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.245007 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.280039 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2f0ebc0d917781b93e3848439cdfeba56b89f9716773c0dbd68ca5aeb866bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.296614 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.311154 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.319851 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.320060 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.320166 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.320247 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.320317 4961 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:47Z","lastTransitionTime":"2025-12-05T17:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.326842 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.344260 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.360468 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.377393 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.394814 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.412498 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.424068 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.424114 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.424156 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.424178 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.424191 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:47Z","lastTransitionTime":"2025-12-05T17:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.434408 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be74575c03a8e9581461d25726af09dc544f4f4c
deea4651fa96677a0b17aa2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be74575c03a8e9581461d25726af09dc544f4f4cdeea4651fa96677a0b17aa2f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"message\\\":\\\"6320 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 17:33:45.953181 6320 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 17:33:45.953190 6320 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953213 6320 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 17:33:45.953216 6320 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 17:33:45.953221 6320 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:33:45.953237 6320 factory.go:656] Stopping watch factory\\\\nI1205 17:33:45.953249 6320 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 17:33:45.953259 6320 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 17:33:45.953284 6320 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953356 6320 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953985 6320 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93e
a2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:47Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.526707 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.526757 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.526770 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.526805 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.526823 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:47Z","lastTransitionTime":"2025-12-05T17:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.630137 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.630191 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.630204 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.630222 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.630231 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:47Z","lastTransitionTime":"2025-12-05T17:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.733004 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.733059 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.733078 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.733098 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.733112 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:47Z","lastTransitionTime":"2025-12-05T17:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.836750 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.836816 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.836829 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.836849 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.836861 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:47Z","lastTransitionTime":"2025-12-05T17:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.940679 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.940730 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.940744 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.940764 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:47 crc kubenswrapper[4961]: I1205 17:33:47.940823 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:47Z","lastTransitionTime":"2025-12-05T17:33:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.043551 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.043605 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.043622 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.043645 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.043659 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:48Z","lastTransitionTime":"2025-12-05T17:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.080221 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.096654 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\
":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.148506 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.148560 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.148576 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.148591 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.148601 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:48Z","lastTransitionTime":"2025-12-05T17:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.160574 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.170348 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5w9vd_f64daea3-7a90-4012-bd0c-31b137bd1cae/ovnkube-controller/0.log" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.173666 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerStarted","Data":"e40bdb4f5cbf7a2b99407cdc461f88277d536b13434064d9b4406da02b952b93"} Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.174176 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.180949 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.202733 4961 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://be74575c03a8e9581461d25726af09dc544f4f4cdeea4651fa96677a0b17aa2f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be74575c03a8e9581461d25726af09dc544f4f4cdeea4651fa96677a0b17aa2f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"message\\\":\\\"6320 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 17:33:45.953181 6320 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 17:33:45.953190 6320 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953213 6320 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 17:33:45.953216 6320 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 17:33:45.953221 6320 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:33:45.953237 6320 factory.go:656] Stopping watch factory\\\\nI1205 17:33:45.953249 6320 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 17:33:45.953259 6320 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 17:33:45.953284 6320 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953356 6320 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953985 6320 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93e
a2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.220447 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2f0ebc0d917781b93e3848439cdfeba56b89f9716773c0dbd68ca5aeb866bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\
\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33
:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e61
73e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.236941 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run
/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.252131 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.252189 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.252200 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.252222 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.252234 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:48Z","lastTransitionTime":"2025-12-05T17:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.252415 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.268248 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.285416 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.300865 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.315890 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.329682 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.346672 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\
\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.355006 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.355067 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.355083 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.355105 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.355119 4961 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:48Z","lastTransitionTime":"2025-12-05T17:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.361856 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.381370 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2f0ebc0d917781b93e3848439cdfeba56b89f9716773c0dbd68ca5aeb866bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.398291 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\
\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.410122 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"
podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.423532 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.438401 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.451875 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.456839 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.456875 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.456887 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.456904 
4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.456915 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:48Z","lastTransitionTime":"2025-12-05T17:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.467332 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.480439 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.497488 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\
\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.516642 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.531539 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.545344 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.559181 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.560135 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.560215 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.560227 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.560245 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.560258 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:48Z","lastTransitionTime":"2025-12-05T17:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.585215 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e40bdb4f5cbf7a2b99407cdc461f88277d536b13
434064d9b4406da02b952b93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be74575c03a8e9581461d25726af09dc544f4f4cdeea4651fa96677a0b17aa2f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"message\\\":\\\"6320 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 17:33:45.953181 6320 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 17:33:45.953190 6320 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953213 6320 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 17:33:45.953216 6320 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 17:33:45.953221 6320 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:33:45.953237 6320 factory.go:656] Stopping watch factory\\\\nI1205 17:33:45.953249 6320 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 17:33:45.953259 6320 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 17:33:45.953284 6320 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953356 6320 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953985 6320 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:48Z is after 2025-08-24T17:21:41Z"
Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.662857 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.662926 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.662944 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.662976 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.662997 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:48Z","lastTransitionTime":"2025-12-05T17:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.766115 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.766168 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.766179 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.766201 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.766218 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:48Z","lastTransitionTime":"2025-12-05T17:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.863393 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.863459 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 17:33:48 crc kubenswrapper[4961]: E1205 17:33:48.863570 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.863462 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 17:33:48 crc kubenswrapper[4961]: E1205 17:33:48.863668 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 17:33:48 crc kubenswrapper[4961]: E1205 17:33:48.863990 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.869872 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.869924 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.869945 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.869967 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:33:48 crc kubenswrapper[4961]: I1205 17:33:48.869985 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:48Z","lastTransitionTime":"2025-12-05T17:33:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.076043 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.076112 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.076136 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.076162 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.076183 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:49Z","lastTransitionTime":"2025-12-05T17:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.179554 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.179619 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.179639 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.179664 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.179683 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:49Z","lastTransitionTime":"2025-12-05T17:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.284440 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.284538 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.285016 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.285118 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.285425 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:49Z","lastTransitionTime":"2025-12-05T17:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.388740 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.388812 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.388825 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.388844 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.388857 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:49Z","lastTransitionTime":"2025-12-05T17:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.491235 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.491608 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.491723 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.492223 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.492313 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:49Z","lastTransitionTime":"2025-12-05T17:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.577531 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n"]
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.578463 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.581255 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.581922 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.595590 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.595674 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.595687 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.595709 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.595723 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:49Z","lastTransitionTime":"2025-12-05T17:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.602970 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:49Z is after 2025-08-24T17:21:41Z"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.619758 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:49Z is after 2025-08-24T17:21:41Z"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.635517 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:49Z is after 2025-08-24T17:21:41Z"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.649441 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:49Z is after 2025-08-24T17:21:41Z"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.672194 4961 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e40bdb4f5cbf7a2b99407cdc461f88277d536b13434064d9b4406da02b952b93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be74575c03a8e9581461d25726af09dc544f4f4cdeea4651fa96677a0b17aa2f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"message\\\":\\\"6320 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 17:33:45.953181 6320 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 17:33:45.953190 6320 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953213 6320 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 17:33:45.953216 6320 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 17:33:45.953221 6320 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:33:45.953237 6320 factory.go:656] Stopping watch factory\\\\nI1205 17:33:45.953249 6320 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 17:33:45.953259 6320 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 17:33:45.953284 6320 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953356 6320 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953985 6320 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.12
6.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:49Z is after 2025-08-24T17:21:41Z"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.687742 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:49Z is after 2025-08-24T17:21:41Z"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.691731 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/31e6cede-369d-4288-b388-c28aae76a50b-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-fdb4n\" (UID: \"31e6cede-369d-4288-b388-c28aae76a50b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.691905 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/31e6cede-369d-4288-b388-c28aae76a50b-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-fdb4n\" (UID: \"31e6cede-369d-4288-b388-c28aae76a50b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.692051 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zj55\" (UniqueName: \"kubernetes.io/projected/31e6cede-369d-4288-b388-c28aae76a50b-kube-api-access-8zj55\") pod \"ovnkube-control-plane-749d76644c-fdb4n\" (UID: \"31e6cede-369d-4288-b388-c28aae76a50b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.692211 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/31e6cede-369d-4288-b388-c28aae76a50b-env-overrides\") pod \"ovnkube-control-plane-749d76644c-fdb4n\" (UID: \"31e6cede-369d-4288-b388-c28aae76a50b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.698623 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.698676 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.698692 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.698714 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.698728 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:49Z","lastTransitionTime":"2025-12-05T17:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.703117 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:49Z is after 2025-08-24T17:21:41Z"
Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.722120 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:49Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.743986 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:49Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.760442 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2f0ebc0d917781b93e3848439cdfeba56b89f9716773c0dbd68ca5aeb866bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:49Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.775656 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:49Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.789330 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:49Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.793574 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/31e6cede-369d-4288-b388-c28aae76a50b-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-fdb4n\" (UID: \"31e6cede-369d-4288-b388-c28aae76a50b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.793638 4961 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/31e6cede-369d-4288-b388-c28aae76a50b-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-fdb4n\" (UID: \"31e6cede-369d-4288-b388-c28aae76a50b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.793667 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zj55\" (UniqueName: \"kubernetes.io/projected/31e6cede-369d-4288-b388-c28aae76a50b-kube-api-access-8zj55\") pod \"ovnkube-control-plane-749d76644c-fdb4n\" (UID: \"31e6cede-369d-4288-b388-c28aae76a50b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.793724 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/31e6cede-369d-4288-b388-c28aae76a50b-env-overrides\") pod \"ovnkube-control-plane-749d76644c-fdb4n\" (UID: \"31e6cede-369d-4288-b388-c28aae76a50b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.794481 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/31e6cede-369d-4288-b388-c28aae76a50b-env-overrides\") pod \"ovnkube-control-plane-749d76644c-fdb4n\" (UID: \"31e6cede-369d-4288-b388-c28aae76a50b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.794870 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/31e6cede-369d-4288-b388-c28aae76a50b-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-fdb4n\" (UID: \"31e6cede-369d-4288-b388-c28aae76a50b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.801570 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.801617 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.801632 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.801657 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.801672 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:49Z","lastTransitionTime":"2025-12-05T17:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.803887 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/31e6cede-369d-4288-b388-c28aae76a50b-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-fdb4n\" (UID: \"31e6cede-369d-4288-b388-c28aae76a50b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.809000 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e6cede-369d-4288-b388-c28aae76a50b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fdb4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:49Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.813705 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zj55\" (UniqueName: \"kubernetes.io/projected/31e6cede-369d-4288-b388-c28aae76a50b-kube-api-access-8zj55\") pod \"ovnkube-control-plane-749d76644c-fdb4n\" (UID: \"31e6cede-369d-4288-b388-c28aae76a50b\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.824039 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:49Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.835891 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:49Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.894683 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.904820 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.904996 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.905117 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.905236 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:49 crc kubenswrapper[4961]: I1205 17:33:49.905348 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:49Z","lastTransitionTime":"2025-12-05T17:33:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.007795 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.007836 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.007851 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.007870 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.007884 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:50Z","lastTransitionTime":"2025-12-05T17:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.110527 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.110578 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.110588 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.110605 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.110615 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:50Z","lastTransitionTime":"2025-12-05T17:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.183055 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5w9vd_f64daea3-7a90-4012-bd0c-31b137bd1cae/ovnkube-controller/1.log" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.183759 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5w9vd_f64daea3-7a90-4012-bd0c-31b137bd1cae/ovnkube-controller/0.log" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.186621 4961 generic.go:334] "Generic (PLEG): container finished" podID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerID="e40bdb4f5cbf7a2b99407cdc461f88277d536b13434064d9b4406da02b952b93" exitCode=1 Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.186687 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerDied","Data":"e40bdb4f5cbf7a2b99407cdc461f88277d536b13434064d9b4406da02b952b93"} Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.186766 4961 scope.go:117] "RemoveContainer" containerID="be74575c03a8e9581461d25726af09dc544f4f4cdeea4651fa96677a0b17aa2f" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.187564 4961 scope.go:117] "RemoveContainer" containerID="e40bdb4f5cbf7a2b99407cdc461f88277d536b13434064d9b4406da02b952b93" Dec 05 17:33:50 crc kubenswrapper[4961]: E1205 17:33:50.187712 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5w9vd_openshift-ovn-kubernetes(f64daea3-7a90-4012-bd0c-31b137bd1cae)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.191625 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" event={"ID":"31e6cede-369d-4288-b388-c28aae76a50b","Type":"ContainerStarted","Data":"1cd217f59fdbb11c5b8edec0f4f4a4930e22c419cc8a9048443a711641b50445"} Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.191877 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" event={"ID":"31e6cede-369d-4288-b388-c28aae76a50b","Type":"ContainerStarted","Data":"25a39d0067ff9d61affe708dd55d8f49ac94b62e3f7e9d48a19cf3e817db0dd2"} Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.211059 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\
\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.213372 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.213410 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.213420 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.213436 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.213448 4961 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:50Z","lastTransitionTime":"2025-12-05T17:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.226700 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.240345 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.253576 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.267340 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.285980 4961 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e40bdb4f5cbf7a2b99407cdc461f88277d536b13434064d9b4406da02b952b93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be74575c03a8e9581461d25726af09dc544f4f4cdeea4651fa96677a0b17aa2f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"message\\\":\\\"6320 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 17:33:45.953181 6320 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 17:33:45.953190 6320 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953213 6320 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 17:33:45.953216 6320 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 17:33:45.953221 6320 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:33:45.953237 6320 factory.go:656] Stopping watch factory\\\\nI1205 17:33:45.953249 6320 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 17:33:45.953259 6320 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 17:33:45.953284 6320 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953356 6320 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953985 6320 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e40bdb4f5cbf7a2b99407cdc461f88277d536b13434064d9b4406da02b952b93\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"message\\\":\\\"s/multus-additional-cni-plugins-sxfzb 
openshift-network-diagnostics/network-check-target-xd92c openshift-network-operator/iptables-alerter-4ln5h openshift-machine-config-operator/machine-config-daemon-4vc27 openshift-network-diagnostics/network-check-source-55646444c4-trplf]\\\\nI1205 17:33:48.093648 6487 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1205 17:33:48.093661 6487 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 17:33:48.093671 6487 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 17:33:48.093679 6487 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nF1205 17:33:48.093704 6487 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annot\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.301808 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.313930 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.316044 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.316103 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.316114 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:50 crc 
kubenswrapper[4961]: I1205 17:33:50.316133 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.316144 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:50Z","lastTransitionTime":"2025-12-05T17:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.328828 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.341070 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.357245 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.374655 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2f0ebc0d917781b93e3848439cdfeba56b89f9716773c0dbd68ca5aeb866bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.390519 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e6cede-369d-4288-b388-c28aae76a50b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fdb4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.406456 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.418849 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.419152 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.419233 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.419321 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.419404 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:50Z","lastTransitionTime":"2025-12-05T17:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.420290 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.521742 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.522083 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.522205 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.522300 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.522388 4961 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:50Z","lastTransitionTime":"2025-12-05T17:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.603125 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.603196 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:33:50 crc kubenswrapper[4961]: E1205 17:33:50.603274 4961 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:33:50 crc kubenswrapper[4961]: E1205 17:33:50.603333 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:34:06.603318003 +0000 UTC m=+52.664468476 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:33:50 crc kubenswrapper[4961]: E1205 17:33:50.603385 4961 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:33:50 crc kubenswrapper[4961]: E1205 17:33:50.603494 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:34:06.603473517 +0000 UTC m=+52.664624070 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.624898 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.624951 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.624963 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.624981 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.624992 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:50Z","lastTransitionTime":"2025-12-05T17:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.692258 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-pgc6p"] Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.692970 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:33:50 crc kubenswrapper[4961]: E1205 17:33:50.693047 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.704483 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.704610 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:33:50 crc kubenswrapper[4961]: E1205 17:33:50.704721 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-05 17:34:06.704629979 +0000 UTC m=+52.765780462 (durationBeforeRetry 16s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:33:50 crc kubenswrapper[4961]: E1205 17:33:50.704802 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:33:50 crc kubenswrapper[4961]: E1205 17:33:50.704822 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:33:50 crc kubenswrapper[4961]: E1205 17:33:50.704835 4961 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.704852 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:33:50 crc kubenswrapper[4961]: E1205 17:33:50.704875 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 17:34:06.704863875 +0000 UTC m=+52.766014348 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:33:50 crc kubenswrapper[4961]: E1205 17:33:50.705109 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:33:50 crc kubenswrapper[4961]: E1205 17:33:50.705163 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:33:50 crc kubenswrapper[4961]: E1205 17:33:50.705180 4961 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:33:50 crc kubenswrapper[4961]: E1205 17:33:50.705277 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 17:34:06.705240424 +0000 UTC m=+52.766391077 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.714908 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.727408 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.727448 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.727460 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.727478 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.727493 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:50Z","lastTransitionTime":"2025-12-05T17:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.734511 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.747593 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.772440 4961 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e40bdb4f5cbf7a2b99407cdc461f88277d536b13434064d9b4406da02b952b93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be74575c03a8e9581461d25726af09dc544f4f4cdeea4651fa96677a0b17aa2f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"message\\\":\\\"6320 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 17:33:45.953181 6320 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 17:33:45.953190 6320 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953213 6320 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 17:33:45.953216 6320 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 17:33:45.953221 6320 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:33:45.953237 6320 factory.go:656] Stopping watch factory\\\\nI1205 17:33:45.953249 6320 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 17:33:45.953259 6320 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 17:33:45.953284 6320 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953356 6320 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953985 6320 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e40bdb4f5cbf7a2b99407cdc461f88277d536b13434064d9b4406da02b952b93\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"message\\\":\\\"s/multus-additional-cni-plugins-sxfzb 
openshift-network-diagnostics/network-check-target-xd92c openshift-network-operator/iptables-alerter-4ln5h openshift-machine-config-operator/machine-config-daemon-4vc27 openshift-network-diagnostics/network-check-source-55646444c4-trplf]\\\\nI1205 17:33:48.093648 6487 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1205 17:33:48.093661 6487 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 17:33:48.093671 6487 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 17:33:48.093679 6487 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nF1205 17:33:48.093704 6487 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annot\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.784402 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.797573 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.805831 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wz4w8\" (UniqueName: \"kubernetes.io/projected/6f24429b-a57e-47d0-8354-87ff9d6bcee8-kube-api-access-wz4w8\") pod \"network-metrics-daemon-pgc6p\" (UID: \"6f24429b-a57e-47d0-8354-87ff9d6bcee8\") " pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.805909 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs\") pod \"network-metrics-daemon-pgc6p\" (UID: \"6f24429b-a57e-47d0-8354-87ff9d6bcee8\") " pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.814243 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.828509 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.830612 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.830656 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.830669 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.830692 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.830704 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:50Z","lastTransitionTime":"2025-12-05T17:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.848452 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2f0ebc0d917781b93e3848439cdfeba56b89f9716773c0dbd68ca5aeb866bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.862845 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.862947 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:33:50 crc kubenswrapper[4961]: E1205 17:33:50.862991 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.862952 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:33:50 crc kubenswrapper[4961]: E1205 17:33:50.863100 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:33:50 crc kubenswrapper[4961]: E1205 17:33:50.863184 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.863282 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\
\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.878513 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e6cede-369d-4288-b388-c28aae76a50b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fdb4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.891999 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.905254 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.906594 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wz4w8\" (UniqueName: \"kubernetes.io/projected/6f24429b-a57e-47d0-8354-87ff9d6bcee8-kube-api-access-wz4w8\") pod \"network-metrics-daemon-pgc6p\" (UID: \"6f24429b-a57e-47d0-8354-87ff9d6bcee8\") " pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.906646 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs\") pod \"network-metrics-daemon-pgc6p\" (UID: \"6f24429b-a57e-47d0-8354-87ff9d6bcee8\") " pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:33:50 crc kubenswrapper[4961]: E1205 17:33:50.906788 4961 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 17:33:50 crc kubenswrapper[4961]: E1205 17:33:50.906837 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs podName:6f24429b-a57e-47d0-8354-87ff9d6bcee8 nodeName:}" failed. No retries permitted until 2025-12-05 17:33:51.406822173 +0000 UTC m=+37.467972646 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs") pod "network-metrics-daemon-pgc6p" (UID: "6f24429b-a57e-47d0-8354-87ff9d6bcee8") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.916681 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pgc6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f24429b-a57e-47d0-8354-87ff9d6bcee8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pgc6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 
2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.928180 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wz4w8\" (UniqueName: \"kubernetes.io/projected/6f24429b-a57e-47d0-8354-87ff9d6bcee8-kube-api-access-wz4w8\") pod \"network-metrics-daemon-pgc6p\" (UID: \"6f24429b-a57e-47d0-8354-87ff9d6bcee8\") " pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.932382 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee
1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.933440 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.933493 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.933507 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.933527 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.933540 4961 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:50Z","lastTransitionTime":"2025-12-05T17:33:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:50 crc kubenswrapper[4961]: I1205 17:33:50.948833 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:50Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.036245 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.036282 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.036291 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.036307 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.036317 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:51Z","lastTransitionTime":"2025-12-05T17:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.139415 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.139466 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.139480 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.139498 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.139510 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:51Z","lastTransitionTime":"2025-12-05T17:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.413233 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs\") pod \"network-metrics-daemon-pgc6p\" (UID: \"6f24429b-a57e-47d0-8354-87ff9d6bcee8\") " pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:33:51 crc kubenswrapper[4961]: E1205 17:33:51.413489 4961 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 17:33:51 crc kubenswrapper[4961]: E1205 17:33:51.413599 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs podName:6f24429b-a57e-47d0-8354-87ff9d6bcee8 nodeName:}" failed. No retries permitted until 2025-12-05 17:33:52.413574121 +0000 UTC m=+38.474724694 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs") pod "network-metrics-daemon-pgc6p" (UID: "6f24429b-a57e-47d0-8354-87ff9d6bcee8") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.456828 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.456877 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.456888 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.456905 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.456915 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:51Z","lastTransitionTime":"2025-12-05T17:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.462180 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" event={"ID":"31e6cede-369d-4288-b388-c28aae76a50b","Type":"ContainerStarted","Data":"809381123c6003acbaac767394ebee9da31e022a661eaf3e1b601a7192664d96"} Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.464181 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5w9vd_f64daea3-7a90-4012-bd0c-31b137bd1cae/ovnkube-controller/1.log" Dec 05 17:33:51 crc kubenswrapper[4961]: E1205 17:33:51.470221 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.476417 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.476487 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.476511 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.476544 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.476567 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:51Z","lastTransitionTime":"2025-12-05T17:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.484113 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running
\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:51 crc kubenswrapper[4961]: E1205 17:33:51.499672 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.504323 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.504389 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.504406 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.504432 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.504449 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:51Z","lastTransitionTime":"2025-12-05T17:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.510017 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e40bdb4f5cbf7a2b99407cdc461f88277d536b13
434064d9b4406da02b952b93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be74575c03a8e9581461d25726af09dc544f4f4cdeea4651fa96677a0b17aa2f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"message\\\":\\\"6320 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 17:33:45.953181 6320 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 17:33:45.953190 6320 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953213 6320 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 17:33:45.953216 6320 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 17:33:45.953221 6320 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:33:45.953237 6320 factory.go:656] Stopping watch factory\\\\nI1205 17:33:45.953249 6320 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 17:33:45.953259 6320 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 17:33:45.953284 6320 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953356 6320 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953985 6320 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e40bdb4f5cbf7a2b99407cdc461f88277d536b13434064d9b4406da02b952b93\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"message\\\":\\\"s/multus-additional-cni-plugins-sxfzb openshift-network-diagnostics/network-check-target-xd92c openshift-network-operator/iptables-alerter-4ln5h openshift-machine-config-operator/machine-config-daemon-4vc27 openshift-network-diagnostics/network-check-source-55646444c4-trplf]\\\\nI1205 17:33:48.093648 6487 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1205 17:33:48.093661 6487 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 17:33:48.093671 6487 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 17:33:48.093679 6487 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nF1205 17:33:48.093704 6487 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because 
it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annot\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c
93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:51 crc kubenswrapper[4961]: E1205 17:33:51.524387 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.527438 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.528894 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.528965 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.528977 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.528998 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.529028 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:51Z","lastTransitionTime":"2025-12-05T17:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.545897 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:51 crc kubenswrapper[4961]: E1205 17:33:51.550419 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"message\\\":\\\"kubelet has no disk 
pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeByt
es\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"7
6caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.554829 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.554898 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.554912 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.554934 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.554948 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:51Z","lastTransitionTime":"2025-12-05T17:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.565628 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:51 crc kubenswrapper[4961]: E1205 17:33:51.569677 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:51 crc kubenswrapper[4961]: E1205 17:33:51.569928 4961 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.571872 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.571942 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.571957 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.571980 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.571995 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:51Z","lastTransitionTime":"2025-12-05T17:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.580087 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.597253 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2f0ebc0d917781b93e3848439cdfeba56b89f9716773c0dbd68ca5aeb866bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.614902 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.630310 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.646498 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.661958 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e6cede-369d-4288-b388-c28aae76a50b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cd217f59fdbb11c5b8edec0f4f4a4930e22c419cc8a9048443a711641b50445\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://809381123c6003acbaac767394ebee9da31e022a661eaf3e1b601a7192664d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fdb4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:51Z is after 2025-08-24T17:21:41Z" Dec 05 
17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.676786 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.676845 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.676855 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.676876 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.676887 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:51Z","lastTransitionTime":"2025-12-05T17:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.679293 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.692526 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T17:33:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.705523 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pgc6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f24429b-a57e-47d0-8354-87ff9d6bcee8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pgc6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.720613 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:51Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.737230 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:51Z is after 2025-08-24T17:21:41Z"
Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.779757 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.779859 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.779878 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.779908 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.779928 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:51Z","lastTransitionTime":"2025-12-05T17:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.862556 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p"
Dec 05 17:33:51 crc kubenswrapper[4961]: E1205 17:33:51.862726 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8"
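Every "Failed to update status for pod" entry above dies the same way: the patch goes through the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743, whose serving certificate expired on 2025-08-24T17:21:41Z while the node clock reads 2025-12-05T17:33:51Z. The "x509: certificate has expired or is not yet valid" text is the standard validity-window check; the Go sketch below reproduces that comparison against a PEM file on disk (the file path is a hypothetical stand-in, not taken from this log).

package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"log"
	"os"
	"time"
)

func main() {
	// Hypothetical path: wherever the webhook keeps its serving certificate.
	raw, err := os.ReadFile("/tmp/webhook-serving.crt")
	if err != nil {
		log.Fatal(err)
	}
	block, _ := pem.Decode(raw)
	if block == nil {
		log.Fatal("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		log.Fatal(err)
	}
	now := time.Now().UTC()
	// Same validity-window test that fails during the TLS handshake above.
	if now.After(cert.NotAfter) || now.Before(cert.NotBefore) {
		fmt.Printf("x509: certificate has expired or is not yet valid: current time %s is after %s\n",
			now.Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
		os.Exit(1)
	}
	fmt.Println("certificate is within its validity window")
}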
pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.882925 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.882990 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.883006 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.883025 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.883040 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:51Z","lastTransitionTime":"2025-12-05T17:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.986604 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.986661 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.986673 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.986692 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:51 crc kubenswrapper[4961]: I1205 17:33:51.987067 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:51Z","lastTransitionTime":"2025-12-05T17:33:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.089919 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.089970 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.089986 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.090008 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.090021 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:52Z","lastTransitionTime":"2025-12-05T17:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.192936 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.193010 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.193049 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.193083 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.193110 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:52Z","lastTransitionTime":"2025-12-05T17:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.296305 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.296542 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.296574 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.296604 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.296626 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:52Z","lastTransitionTime":"2025-12-05T17:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.400022 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.400091 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.400110 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.400140 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.400158 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:52Z","lastTransitionTime":"2025-12-05T17:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.426839 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs\") pod \"network-metrics-daemon-pgc6p\" (UID: \"6f24429b-a57e-47d0-8354-87ff9d6bcee8\") " pod="openshift-multus/network-metrics-daemon-pgc6p"
Dec 05 17:33:52 crc kubenswrapper[4961]: E1205 17:33:52.427122 4961 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 17:33:52 crc kubenswrapper[4961]: E1205 17:33:52.427267 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs podName:6f24429b-a57e-47d0-8354-87ff9d6bcee8 nodeName:}" failed. No retries permitted until 2025-12-05 17:33:54.427220538 +0000 UTC m=+40.488371091 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs") pod "network-metrics-daemon-pgc6p" (UID: "6f24429b-a57e-47d0-8354-87ff9d6bcee8") : object "openshift-multus"/"metrics-daemon-secret" not registered
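The three entries above are the kubelet's standard failed-mount sequence for the metrics-certs secret volume: reconciler_common.go starts MountVolume, secret.go finds that openshift-multus/metrics-daemon-secret is not yet registered with the kubelet's secret manager, and nestedpendingoperations.go blocks retries for an exponentially growing durationBeforeRetry (2s here, 4s on the next attempt at 17:33:54 below). A toy model of that doubling, with illustrative constants rather than the kubelet's actual tuning:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Illustrative values only: the log shows 2s, then 4s, between retries.
	backoff := 2 * time.Second
	const maxBackoff = 2 * time.Minute
	for attempt := 1; attempt <= 6; attempt++ {
		fmt.Printf("attempt %d: mount failed, no retries permitted for %s\n", attempt, backoff)
		// Double the wait after each failure, capped at maxBackoff.
		backoff *= 2
		if backoff > maxBackoff {
			backoff = maxBackoff
		}
	}
}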
Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.503439 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.503614 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.503626 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.503644 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.503657 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:52Z","lastTransitionTime":"2025-12-05T17:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.606462 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.606550 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.606575 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.606605 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.606628 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:52Z","lastTransitionTime":"2025-12-05T17:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.710106 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.710184 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.710202 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.710232 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.710251 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:52Z","lastTransitionTime":"2025-12-05T17:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.813926 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.813997 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.814011 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.814034 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.814048 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:52Z","lastTransitionTime":"2025-12-05T17:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.863134 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.863247 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:33:52 crc kubenswrapper[4961]: E1205 17:33:52.863317 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.863418 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 17:33:52 crc kubenswrapper[4961]: E1205 17:33:52.863477 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 17:33:52 crc kubenswrapper[4961]: E1205 17:33:52.863566 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.916329 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.916401 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.916415 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.916432 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:33:52 crc kubenswrapper[4961]: I1205 17:33:52.916862 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:52Z","lastTransitionTime":"2025-12-05T17:33:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.020902 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.020970 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.020981 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.021001 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.021012 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:53Z","lastTransitionTime":"2025-12-05T17:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
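The setters.go:603 entries repeating through this stretch all serialize one node condition: Ready=False with reason KubeletNotReady, re-asserted every ~100ms while the CNI config is missing. The stand-in struct below mirrors the JSON shape visible in the log (it matches the field layout of NodeCondition in k8s.io/api/core/v1, reduced to plain strings so the sketch needs no Kubernetes imports) and reprints the condition the way the kubelet logs it:

package main

import (
	"encoding/json"
	"fmt"
)

// NodeCondition mirrors the condition={...} JSON seen in the setters.go lines.
type NodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	c := NodeCondition{
		Type:               "Ready",
		Status:             "False",
		LastHeartbeatTime:  "2025-12-05T17:33:53Z",
		LastTransitionTime: "2025-12-05T17:33:53Z",
		Reason:             "KubeletNotReady",
		Message:            "container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?",
	}
	out, _ := json.Marshal(c)
	fmt.Println(string(out))
}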
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.123665 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.123709 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.123718 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.123735 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.123746 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:53Z","lastTransitionTime":"2025-12-05T17:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.226981 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.227085 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.227103 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.227130 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.227149 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:53Z","lastTransitionTime":"2025-12-05T17:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.330095 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.330445 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.330529 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.330624 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.330714 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:53Z","lastTransitionTime":"2025-12-05T17:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.433024 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.433090 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.433111 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.433136 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.433154 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:53Z","lastTransitionTime":"2025-12-05T17:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.535894 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.536583 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.536681 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.536810 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.536919 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:53Z","lastTransitionTime":"2025-12-05T17:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.640222 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.640262 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.640274 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.640291 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.640303 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:53Z","lastTransitionTime":"2025-12-05T17:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.743288 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.743761 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.743945 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.744109 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.744230 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:53Z","lastTransitionTime":"2025-12-05T17:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.847733 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.847828 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.847845 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.847869 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.847885 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:53Z","lastTransitionTime":"2025-12-05T17:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.863193 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p"
Dec 05 17:33:53 crc kubenswrapper[4961]: E1205 17:33:53.863385 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8"
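The util.go:30 / pod_workers.go:1301 pairs show why these pods stay stuck: each needs a new sandbox, but pod sync is skipped while the runtime reports NetworkReady=false, and the runtime keeps saying that for as long as it finds no CNI configuration under /etc/kubernetes/cni/net.d/. A minimal sketch of that directory check, assuming the usual libcni-style file extensions:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// cniConfigPresent reports whether dir contains any file a libcni-style
// loader would pick up (*.conf, *.conflist, *.json).
func cniConfigPresent(dir string) bool {
	for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
		// Glob only errors on a malformed pattern, so the error is ignored here.
		matches, _ := filepath.Glob(filepath.Join(dir, pat))
		if len(matches) > 0 {
			return true
		}
	}
	return false
}

func main() {
	dir := "/etc/kubernetes/cni/net.d/"
	if !cniConfigPresent(dir) {
		fmt.Printf("network plugin returns error: no CNI configuration file in %s. Has your network provider started?\n", dir)
		os.Exit(1)
	}
	fmt.Println("NetworkReady=true")
}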
pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.950534 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.950586 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.950601 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.950623 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:53 crc kubenswrapper[4961]: I1205 17:33:53.950637 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:53Z","lastTransitionTime":"2025-12-05T17:33:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.053506 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.053590 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.053603 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.053627 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.053640 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:54Z","lastTransitionTime":"2025-12-05T17:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.157214 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.157272 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.157289 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.157313 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.157332 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:54Z","lastTransitionTime":"2025-12-05T17:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.260240 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.260287 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.260299 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.260317 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.260329 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:54Z","lastTransitionTime":"2025-12-05T17:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.363488 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.363562 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.363582 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.363610 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.363628 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:54Z","lastTransitionTime":"2025-12-05T17:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.448609 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs\") pod \"network-metrics-daemon-pgc6p\" (UID: \"6f24429b-a57e-47d0-8354-87ff9d6bcee8\") " pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:33:54 crc kubenswrapper[4961]: E1205 17:33:54.448797 4961 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 17:33:54 crc kubenswrapper[4961]: E1205 17:33:54.448854 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs podName:6f24429b-a57e-47d0-8354-87ff9d6bcee8 nodeName:}" failed. No retries permitted until 2025-12-05 17:33:58.448840543 +0000 UTC m=+44.509991006 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs") pod "network-metrics-daemon-pgc6p" (UID: "6f24429b-a57e-47d0-8354-87ff9d6bcee8") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.466725 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.466768 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.466801 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.466818 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.466830 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:54Z","lastTransitionTime":"2025-12-05T17:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.569439 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.569487 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.569500 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.569520 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.569532 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:54Z","lastTransitionTime":"2025-12-05T17:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.673432 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.673488 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.673508 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.673538 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.673557 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:54Z","lastTransitionTime":"2025-12-05T17:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.776590 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.776626 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.776637 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.776650 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.776660 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:54Z","lastTransitionTime":"2025-12-05T17:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.862934 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.863018 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.863137 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:33:54 crc kubenswrapper[4961]: E1205 17:33:54.863419 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:33:54 crc kubenswrapper[4961]: E1205 17:33:54.863595 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:33:54 crc kubenswrapper[4961]: E1205 17:33:54.863710 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.878913 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.878994 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.879007 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.879029 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.879042 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:54Z","lastTransitionTime":"2025-12-05T17:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.885012 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:54Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.901376 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:54Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.916309 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:54Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.933710 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:54Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.946852 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:54Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.977182 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba
0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e40bdb4f5cbf7a2b99407cdc461f88277d536b13434064d9b4406da02b952b93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be74575c03a8e9581461d25726af09dc544f4f4cdeea4651fa96677a0b17aa2f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"message\\\":\\\"6320 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 17:33:45.953181 6320 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 17:33:45.953190 6320 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953213 6320 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 17:33:45.953216 6320 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 17:33:45.953221 6320 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:33:45.953237 6320 factory.go:656] Stopping watch factory\\\\nI1205 17:33:45.953249 6320 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 17:33:45.953259 6320 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 17:33:45.953284 6320 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953356 6320 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953985 6320 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e40bdb4f5cbf7a2b99407cdc461f88277d536b13434064d9b4406da02b952b93\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"message\\\":\\\"s/multus-additional-cni-plugins-sxfzb openshift-network-diagnostics/network-check-target-xd92c openshift-network-operator/iptables-alerter-4ln5h openshift-machine-config-operator/machine-config-daemon-4vc27 openshift-network-diagnostics/network-check-source-55646444c4-trplf]\\\\nI1205 17:33:48.093648 6487 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1205 17:33:48.093661 6487 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 17:33:48.093671 6487 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 17:33:48.093679 6487 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nF1205 17:33:48.093704 6487 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc 
annot\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1
d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:54Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.982081 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.982171 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.982188 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.982227 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.982243 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:54Z","lastTransitionTime":"2025-12-05T17:33:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:54 crc kubenswrapper[4961]: I1205 17:33:54.997657 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2f0ebc0d917781b93e3848439cdfeba56b89f9716773c0dbd68ca5aeb866bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:54Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.015262 4961 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-bgtgs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.030147 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.048128 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.066357 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.082029 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.085094 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.085142 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.085160 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.085187 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.085205 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:55Z","lastTransitionTime":"2025-12-05T17:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.101752 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e6cede-369d-4288-b388-c28aae76a50b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cd217f59fdbb11c5b8edec0f4f4a4930e22c419cc8a9048443a711641b50445\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://809381123c6003acbaac767394ebee9da31e022a661eaf3e1b601a7192664d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fdb4n\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.120917 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.134229 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.148882 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pgc6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f24429b-a57e-47d0-8354-87ff9d6bcee8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pgc6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:33:55Z is after 2025-08-24T17:21:41Z" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.187732 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.187808 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.187820 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.187842 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.187858 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:55Z","lastTransitionTime":"2025-12-05T17:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.290571 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.290616 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.290628 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.290646 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.290657 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:55Z","lastTransitionTime":"2025-12-05T17:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.392965 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.393007 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.393018 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.393038 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.393049 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:55Z","lastTransitionTime":"2025-12-05T17:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.495381 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.495414 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.495422 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.495437 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.495448 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:55Z","lastTransitionTime":"2025-12-05T17:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.599243 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.599305 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.599317 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.599339 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.599360 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:55Z","lastTransitionTime":"2025-12-05T17:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.702167 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.702213 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.702223 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.702239 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.702249 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:55Z","lastTransitionTime":"2025-12-05T17:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.805902 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.805986 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.806001 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.806018 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.806027 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:55Z","lastTransitionTime":"2025-12-05T17:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.862855 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:33:55 crc kubenswrapper[4961]: E1205 17:33:55.863086 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.909596 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.909651 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.909663 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.909682 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:55 crc kubenswrapper[4961]: I1205 17:33:55.909693 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:55Z","lastTransitionTime":"2025-12-05T17:33:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.012927 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.012995 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.013008 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.013034 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.013048 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:56Z","lastTransitionTime":"2025-12-05T17:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.115374 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.115433 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.115449 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.115472 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.115493 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:56Z","lastTransitionTime":"2025-12-05T17:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.218380 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.218484 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.218500 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.218526 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.218539 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:56Z","lastTransitionTime":"2025-12-05T17:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.322320 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.322402 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.322429 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.322460 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.322484 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:56Z","lastTransitionTime":"2025-12-05T17:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.425655 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.425722 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.425733 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.425753 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.425766 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:56Z","lastTransitionTime":"2025-12-05T17:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.529326 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.529390 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.529405 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.529426 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.529441 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:56Z","lastTransitionTime":"2025-12-05T17:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.632942 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.632994 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.633009 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.633027 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.633043 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:56Z","lastTransitionTime":"2025-12-05T17:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.736690 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.736805 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.736826 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.736854 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.736873 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:56Z","lastTransitionTime":"2025-12-05T17:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.839973 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.840025 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.840036 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.840052 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.840064 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:56Z","lastTransitionTime":"2025-12-05T17:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.863032 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.863113 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.863274 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:33:56 crc kubenswrapper[4961]: E1205 17:33:56.863286 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:33:56 crc kubenswrapper[4961]: E1205 17:33:56.863395 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:33:56 crc kubenswrapper[4961]: E1205 17:33:56.863550 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.943212 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.943306 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.943362 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.943382 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:56 crc kubenswrapper[4961]: I1205 17:33:56.943412 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:56Z","lastTransitionTime":"2025-12-05T17:33:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.046311 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.046388 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.046406 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.046429 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.046454 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:57Z","lastTransitionTime":"2025-12-05T17:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.148676 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.148730 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.148744 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.148763 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.148793 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:57Z","lastTransitionTime":"2025-12-05T17:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.251508 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.251558 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.251570 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.251587 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.251600 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:57Z","lastTransitionTime":"2025-12-05T17:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.354450 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.354508 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.354525 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.354545 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.354559 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:57Z","lastTransitionTime":"2025-12-05T17:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.457630 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.457691 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.457705 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.457724 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.457738 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:57Z","lastTransitionTime":"2025-12-05T17:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.560019 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.560070 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.560080 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.560099 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.560111 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:57Z","lastTransitionTime":"2025-12-05T17:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.664421 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.664491 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.664505 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.664533 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.664545 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:57Z","lastTransitionTime":"2025-12-05T17:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.766941 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.766989 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.766999 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.767014 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.767026 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:57Z","lastTransitionTime":"2025-12-05T17:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.863377 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:33:57 crc kubenswrapper[4961]: E1205 17:33:57.863561 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.869001 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.869034 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.869047 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.869060 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.869070 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:57Z","lastTransitionTime":"2025-12-05T17:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.971417 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.971456 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.971465 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.971509 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:57 crc kubenswrapper[4961]: I1205 17:33:57.971525 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:57Z","lastTransitionTime":"2025-12-05T17:33:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.073649 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.073708 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.073726 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.073750 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.073767 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:58Z","lastTransitionTime":"2025-12-05T17:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.176574 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.176642 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.176661 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.176684 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.176704 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:58Z","lastTransitionTime":"2025-12-05T17:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.279619 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.279685 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.279696 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.279715 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.279728 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:58Z","lastTransitionTime":"2025-12-05T17:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.382562 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.382620 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.382629 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.382649 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.382659 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:58Z","lastTransitionTime":"2025-12-05T17:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.484703 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.484749 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.484761 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.484801 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.484820 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:58Z","lastTransitionTime":"2025-12-05T17:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.494756 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs\") pod \"network-metrics-daemon-pgc6p\" (UID: \"6f24429b-a57e-47d0-8354-87ff9d6bcee8\") " pod="openshift-multus/network-metrics-daemon-pgc6p"
Dec 05 17:33:58 crc kubenswrapper[4961]: E1205 17:33:58.494902 4961 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 17:33:58 crc kubenswrapper[4961]: E1205 17:33:58.494964 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs podName:6f24429b-a57e-47d0-8354-87ff9d6bcee8 nodeName:}" failed. No retries permitted until 2025-12-05 17:34:06.494945731 +0000 UTC m=+52.556096204 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs") pod "network-metrics-daemon-pgc6p" (UID: "6f24429b-a57e-47d0-8354-87ff9d6bcee8") : object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.587667 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.587737 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.587761 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.587828 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.587857 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:58Z","lastTransitionTime":"2025-12-05T17:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
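
The three entries at 17:33:58.49 show the one pod failing for a second reason: the metrics-certs secret volume cannot mount because the metrics-daemon-secret object is not yet registered with the kubelet, and the next attempt is pushed out to 17:34:06 with durationBeforeRetry already at 8s. Below is a sketch of that doubling backoff; the starting delay and ceiling are assumptions for illustration, kubelet's real tuning lives in nestedpendingoperations.go:

```go
// backoffsketch.go: the doubling retry delay implied by
// "(durationBeforeRetry 8s)" above. Starting delay and ceiling are assumed
// values, not kubelet's exact constants.
package main

import (
	"fmt"
	"time"
)

func main() {
	delay := 500 * time.Millisecond  // assumed starting delay
	const maxDelay = 2 * time.Minute // assumed ceiling
	for attempt := 1; attempt <= 6; attempt++ {
		fmt.Printf("failure %d: no retries permitted for %v\n", attempt, delay)
		delay *= 2
		if delay > maxDelay {
			delay = maxDelay
		}
	}
}
```

Under those assumed constants, the 8s in the log would correspond to the fifth consecutive failure of this mount.
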
Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.691598 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.691665 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.691678 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.691698 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.691715 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:58Z","lastTransitionTime":"2025-12-05T17:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.794525 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.794581 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.794594 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.794615 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.794629 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:58Z","lastTransitionTime":"2025-12-05T17:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.862427 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.862535 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 17:33:58 crc kubenswrapper[4961]: E1205 17:33:58.862594 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:33:58 crc kubenswrapper[4961]: E1205 17:33:58.862703 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.862823 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:33:58 crc kubenswrapper[4961]: E1205 17:33:58.862889 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.897342 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.897817 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.898003 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.898163 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:58 crc kubenswrapper[4961]: I1205 17:33:58.898346 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:58Z","lastTransitionTime":"2025-12-05T17:33:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.001296 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.001330 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.001339 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.001354 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.001364 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:59Z","lastTransitionTime":"2025-12-05T17:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.104269 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.104327 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.104347 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.104373 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.104393 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:59Z","lastTransitionTime":"2025-12-05T17:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.207289 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.207349 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.207367 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.207391 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.207408 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:59Z","lastTransitionTime":"2025-12-05T17:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.310231 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.310271 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.310288 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.310304 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.310315 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:59Z","lastTransitionTime":"2025-12-05T17:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.412939 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.413015 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.413040 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.413071 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.413093 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:59Z","lastTransitionTime":"2025-12-05T17:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.516048 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.516446 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.516549 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.516658 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.516751 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:59Z","lastTransitionTime":"2025-12-05T17:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.619293 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.619377 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.619389 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.619408 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.619420 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:59Z","lastTransitionTime":"2025-12-05T17:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.723324 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.723396 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.723419 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.723448 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.723472 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:59Z","lastTransitionTime":"2025-12-05T17:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.827281 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.827365 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.827392 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.827423 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.827445 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:59Z","lastTransitionTime":"2025-12-05T17:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.863009 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:33:59 crc kubenswrapper[4961]: E1205 17:33:59.863212 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.930635 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.930718 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.930743 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.930814 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:33:59 crc kubenswrapper[4961]: I1205 17:33:59.930841 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:33:59Z","lastTransitionTime":"2025-12-05T17:33:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.034430 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.034515 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.034539 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.034572 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.034598 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:00Z","lastTransitionTime":"2025-12-05T17:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.137713 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.137770 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.137804 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.137823 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.137837 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:00Z","lastTransitionTime":"2025-12-05T17:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.240725 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.240822 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.240840 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.240858 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.240871 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:00Z","lastTransitionTime":"2025-12-05T17:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.344317 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.344385 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.344406 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.344433 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.344452 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:00Z","lastTransitionTime":"2025-12-05T17:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.447974 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.448035 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.448054 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.448080 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.448097 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:00Z","lastTransitionTime":"2025-12-05T17:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.551467 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.551599 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.551628 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.551660 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.551689 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:00Z","lastTransitionTime":"2025-12-05T17:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.654274 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.654363 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.654385 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.654412 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.654431 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:00Z","lastTransitionTime":"2025-12-05T17:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.757287 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.757377 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.757401 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.757434 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.757461 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:00Z","lastTransitionTime":"2025-12-05T17:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.860569 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.860634 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.860659 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.860687 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.860709 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:00Z","lastTransitionTime":"2025-12-05T17:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.863050 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.863151 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:34:00 crc kubenswrapper[4961]: E1205 17:34:00.863255 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.863279 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 17:34:00 crc kubenswrapper[4961]: E1205 17:34:00.863413 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 17:34:00 crc kubenswrapper[4961]: E1205 17:34:00.863497 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.963231 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.963305 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.963318 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.963362 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:00 crc kubenswrapper[4961]: I1205 17:34:00.963381 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:00Z","lastTransitionTime":"2025-12-05T17:34:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.066552 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.066617 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.066640 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.066674 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.066696 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:01Z","lastTransitionTime":"2025-12-05T17:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
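
Every "Error syncing pod, skipping" entry in this section reduces to the same root condition the heartbeats keep reporting: nothing under /etc/kubernetes/cni/net.d/. A node-local check of exactly that directory; the path is taken verbatim from the log message, and the extension filter follows libcni's loader conventions:

```go
// cnicheck.go: checks whether /etc/kubernetes/cni/net.d/ (the path named in
// the kubelet message above) contains any CNI configuration yet.
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	dir := "/etc/kubernetes/cni/net.d"
	entries, err := os.ReadDir(dir)
	if err != nil {
		fmt.Println("cannot read CNI conf dir:", err)
		return
	}
	found := 0
	for _, e := range entries {
		ext := filepath.Ext(e.Name())
		// libcni only considers .conf, .conflist and .json files here.
		if !e.IsDir() && (ext == ".conf" || ext == ".conflist" || ext == ".json") {
			fmt.Println("found:", filepath.Join(dir, e.Name()))
			found++
		}
	}
	if found == 0 {
		fmt.Println("no CNI configuration files; NetworkReady stays false until the network provider writes one")
	}
}
```
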
Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.169581 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.169645 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.169661 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.169688 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.169706 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:01Z","lastTransitionTime":"2025-12-05T17:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.272546 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.272593 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.272600 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.272616 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.272626 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:01Z","lastTransitionTime":"2025-12-05T17:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.376058 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.376125 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.376142 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.376175 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.376197 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:01Z","lastTransitionTime":"2025-12-05T17:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.480610 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.480674 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.480689 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.480742 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.480764 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:01Z","lastTransitionTime":"2025-12-05T17:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.584613 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.584684 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.584702 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.584727 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.584745 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:01Z","lastTransitionTime":"2025-12-05T17:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.687236 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.687305 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.687326 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.687354 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.687373 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:01Z","lastTransitionTime":"2025-12-05T17:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.728483 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.728523 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.728533 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.728548 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.728558 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:01Z","lastTransitionTime":"2025-12-05T17:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:01 crc kubenswrapper[4961]: E1205 17:34:01.748894 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.753684 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.753736 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.753750 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.753770 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.753802 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:01Z","lastTransitionTime":"2025-12-05T17:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:01 crc kubenswrapper[4961]: E1205 17:34:01.770613 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.775411 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.775465 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.775477 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.775496 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.775512 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:01Z","lastTransitionTime":"2025-12-05T17:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:01 crc kubenswrapper[4961]: E1205 17:34:01.794769 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.799173 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.799249 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.799269 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.799291 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.799348 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:01Z","lastTransitionTime":"2025-12-05T17:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:01 crc kubenswrapper[4961]: E1205 17:34:01.816139 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.820768 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.820906 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.820931 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.820966 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.820989 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:01Z","lastTransitionTime":"2025-12-05T17:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:01 crc kubenswrapper[4961]: E1205 17:34:01.843335 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:01Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:01 crc kubenswrapper[4961]: E1205 17:34:01.843525 4961 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.845585 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
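Every status-patch attempt in the sequence above fails identically: the API server cannot call the validating webhook "node.network-node-identity.openshift.io" at https://127.0.0.1:9743/node because the webhook's serving certificate expired on 2025-08-24T17:21:41Z while the node clock reads 2025-12-05T17:34:01Z, and after the final attempt the kubelet exhausts its fixed retry budget (nodeStatusUpdateRetry, five attempts per update cycle) and logs "update node status exceeds retry count". A minimal Go sketch follows (illustrative only, assuming it is run on the node itself; not part of the kubelet) that retrieves and prints the validity window of the certificate the TLS handshake is rejecting:

    package main

    import (
        "crypto/tls"
        "fmt"
        "log"
    )

    func main() {
        // InsecureSkipVerify lets us complete the handshake and inspect the
        // certificate even though it is expired.
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            log.Fatal(err)
        }
        defer conn.Close()
        for _, cert := range conn.ConnectionState().PeerCertificates {
            fmt.Printf("subject=%s notBefore=%s notAfter=%s\n",
                cert.Subject, cert.NotBefore.UTC(), cert.NotAfter.UTC())
        }
    }

A notAfter earlier than the current time, as these records report, points at the webhook's serving certificate, not the kubelet, as what must be rotated before node status updates can succeed.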
event="NodeHasSufficientMemory" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.845628 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.845641 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.845659 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.845674 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:01Z","lastTransitionTime":"2025-12-05T17:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.863252 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:34:01 crc kubenswrapper[4961]: E1205 17:34:01.863488 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.948279 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.948313 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.948322 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.948338 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:01 crc kubenswrapper[4961]: I1205 17:34:01.948348 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:01Z","lastTransitionTime":"2025-12-05T17:34:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.050313 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.050354 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.050364 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.050379 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.050392 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:02Z","lastTransitionTime":"2025-12-05T17:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.152521 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.152574 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.152583 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.152602 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.152612 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:02Z","lastTransitionTime":"2025-12-05T17:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.256071 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.256132 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.256148 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.256170 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.256183 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:02Z","lastTransitionTime":"2025-12-05T17:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.358828 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.358881 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.358902 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.358929 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.358947 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:02Z","lastTransitionTime":"2025-12-05T17:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.461910 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.461962 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.461975 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.461994 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.462006 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:02Z","lastTransitionTime":"2025-12-05T17:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.564468 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.564534 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.564544 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.564562 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.564580 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:02Z","lastTransitionTime":"2025-12-05T17:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.667332 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.667380 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.667392 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.667413 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.667427 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:02Z","lastTransitionTime":"2025-12-05T17:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.770256 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.770340 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.770358 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.770383 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.770428 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:02Z","lastTransitionTime":"2025-12-05T17:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.863300 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.863343 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.863409 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:02 crc kubenswrapper[4961]: E1205 17:34:02.863439 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:34:02 crc kubenswrapper[4961]: E1205 17:34:02.863506 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:34:02 crc kubenswrapper[4961]: E1205 17:34:02.863638 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.871826 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.871862 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.871871 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.871883 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.871893 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:02Z","lastTransitionTime":"2025-12-05T17:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.976172 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.976241 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.976263 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.976293 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:02 crc kubenswrapper[4961]: I1205 17:34:02.976329 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:02Z","lastTransitionTime":"2025-12-05T17:34:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.080540 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.080595 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.080607 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.080629 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.080645 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:03Z","lastTransitionTime":"2025-12-05T17:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.183873 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.183970 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.183997 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.184032 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.184065 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:03Z","lastTransitionTime":"2025-12-05T17:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.287114 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.287187 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.287201 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.287224 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.287237 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:03Z","lastTransitionTime":"2025-12-05T17:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.389800 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.389849 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.389858 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.389875 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.389894 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:03Z","lastTransitionTime":"2025-12-05T17:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.493095 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.493136 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.493147 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.493165 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.493177 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:03Z","lastTransitionTime":"2025-12-05T17:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.596476 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.596542 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.596555 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.596572 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.596584 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:03Z","lastTransitionTime":"2025-12-05T17:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.699231 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.699263 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.699274 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.699289 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.699301 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:03Z","lastTransitionTime":"2025-12-05T17:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.802538 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.802593 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.802607 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.802626 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.802640 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:03Z","lastTransitionTime":"2025-12-05T17:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.862635 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:34:03 crc kubenswrapper[4961]: E1205 17:34:03.862881 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.905823 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.905883 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.905896 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.905918 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:03 crc kubenswrapper[4961]: I1205 17:34:03.905930 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:03Z","lastTransitionTime":"2025-12-05T17:34:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.008125 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.008181 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.008192 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.008213 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.008226 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:04Z","lastTransitionTime":"2025-12-05T17:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.112006 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.112065 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.112087 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.112113 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.112131 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:04Z","lastTransitionTime":"2025-12-05T17:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.215408 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.215480 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.215499 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.215527 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.215547 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:04Z","lastTransitionTime":"2025-12-05T17:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.318849 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.318930 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.318964 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.318991 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.319010 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:04Z","lastTransitionTime":"2025-12-05T17:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.422527 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.422577 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.422596 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.422620 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.422638 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:04Z","lastTransitionTime":"2025-12-05T17:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.525355 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.525432 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.525451 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.525478 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.525496 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:04Z","lastTransitionTime":"2025-12-05T17:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.629262 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.629315 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.629332 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.629357 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.629375 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:04Z","lastTransitionTime":"2025-12-05T17:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.733205 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.733685 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.733901 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.734040 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.734214 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:04Z","lastTransitionTime":"2025-12-05T17:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.838430 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.838522 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.838535 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.838573 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.838590 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:04Z","lastTransitionTime":"2025-12-05T17:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.862989 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.863115 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.863360 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:34:04 crc kubenswrapper[4961]: E1205 17:34:04.863635 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:34:04 crc kubenswrapper[4961]: E1205 17:34:04.863754 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:34:04 crc kubenswrapper[4961]: E1205 17:34:04.863982 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.864136 4961 scope.go:117] "RemoveContainer" containerID="e40bdb4f5cbf7a2b99407cdc461f88277d536b13434064d9b4406da02b952b93" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.883390 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:04Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.908339 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":t
rue,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for 
client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:04Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.925526 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:04Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.941302 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.941360 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.941374 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.941397 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.941410 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:04Z","lastTransitionTime":"2025-12-05T17:34:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.942508 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:04Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.963577 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e40bdb4f5cbf7a2b99407cdc461f88277d536b13434064d9b4406da02b952b93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://be74575c03a8e9581461d25726af09dc544f4f4cdeea4651fa96677a0b17aa2f\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"message\\\":\\\"6320 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1205 17:33:45.953181 6320 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1205 17:33:45.953190 6320 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953213 6320 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1205 17:33:45.953216 6320 handler.go:208] Removed *v1.Node event handler 2\\\\nI1205 17:33:45.953221 6320 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI1205 17:33:45.953237 6320 factory.go:656] Stopping watch factory\\\\nI1205 17:33:45.953249 6320 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI1205 17:33:45.953259 6320 handler.go:208] Removed *v1.Node event handler 7\\\\nI1205 17:33:45.953284 6320 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953356 6320 reflector.go:311] Stopping reflector *v1.Namespace (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1205 17:33:45.953985 6320 reflector.go:311] Stopping reflector *v1.EgressQoS (0s) from 
github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/f\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e40bdb4f5cbf7a2b99407cdc461f88277d536b13434064d9b4406da02b952b93\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"message\\\":\\\"s/multus-additional-cni-plugins-sxfzb openshift-network-diagnostics/network-check-target-xd92c openshift-network-operator/iptables-alerter-4ln5h openshift-machine-config-operator/machine-config-daemon-4vc27 openshift-network-diagnostics/network-check-source-55646444c4-trplf]\\\\nI1205 17:33:48.093648 6487 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1205 17:33:48.093661 6487 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 17:33:48.093671 6487 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 17:33:48.093679 6487 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nF1205 17:33:48.093704 6487 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc 
annot\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1
d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:04Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:04 crc kubenswrapper[4961]: I1205 17:34:04.983472 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89
c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:04Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.003981 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.025018 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.045479 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.045544 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.045564 4961 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.045592 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.045609 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:05Z","lastTransitionTime":"2025-12-05T17:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.046663 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.066307 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2f0ebc0d917781b93e3848439cdfeba56b89f9716773c0dbd68ca5aeb866bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.089166 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.108318 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.124799 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e6cede-369d-4288-b388-c28aae76a50b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cd217f59fdbb11c5b8edec0f4f4a4930e22c419cc8a9048443a711641b50445\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://809381123c6003acbaac767394ebee9da31e022a661eaf3e1b601a7192664d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fdb4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 
17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.138982 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.148550 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.148580 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.148591 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.148612 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.148626 4961 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:05Z","lastTransitionTime":"2025-12-05T17:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.154953 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pgc6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f24429b-a57e-47d0-8354-87ff9d6bcee8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pgc6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.168658 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.181599 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.194861 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.210732 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2f0ebc0d917781b93e3848439cdfeba56b89f9716773c0dbd68ca5aeb866bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.225409 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.238560 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.251880 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.251926 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.251940 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:05 crc 
kubenswrapper[4961]: I1205 17:34:05.251961 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.251987 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:05Z","lastTransitionTime":"2025-12-05T17:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.252130 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.266586 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e6cede-369d-4288-b388-c28aae76a50b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cd217f59fdbb11c5b8edec0f4f4a4930e22c419cc8a9048443a711641b50445\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://809381123c6003acbaac767394ebee9da31e022a661eaf3e1b601a7192664d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\
\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fdb4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.283812 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.297697 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.321296 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pgc6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f24429b-a57e-47d0-8354-87ff9d6bcee8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pgc6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.337872 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.354730 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.355664 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.355721 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.355735 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.355759 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.355794 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:05Z","lastTransitionTime":"2025-12-05T17:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.380792 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.406455 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e40bdb4f5cbf7a2b99407cdc461f88277d536b13434064d9b4406da02b952b93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e40bdb4f5cbf7a2b99407cdc461f88277d536b13434064d9b4406da02b952b93\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"message\\\":\\\"s/multus-additional-cni-plugins-sxfzb openshift-network-diagnostics/network-check-target-xd92c openshift-network-operator/iptables-alerter-4ln5h openshift-machine-config-operator/machine-config-daemon-4vc27 openshift-network-diagnostics/network-check-source-55646444c4-trplf]\\\\nI1205 17:33:48.093648 6487 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1205 17:33:48.093661 6487 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 17:33:48.093671 6487 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 17:33:48.093679 6487 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nF1205 17:33:48.093704 6487 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc 
annot\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-5w9vd_openshift-ovn-kubernetes(f64daea3-7a90-4012-bd0c-31b137bd1cae)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursi
veReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.421323 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.433966 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z"
Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.459166 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.459208 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.459221 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.459238 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.459250 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:05Z","lastTransitionTime":"2025-12-05T17:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.521650 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5w9vd_f64daea3-7a90-4012-bd0c-31b137bd1cae/ovnkube-controller/1.log"
Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.525190 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerStarted","Data":"fea80df3fb98d4ea7f85ad32d7d2bb20f720d499c9b0e7a75265688e5f71b4b7"}
Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.525648 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd"
Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.539222 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.561445 4961 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fea80df3fb98d4ea7f85ad32d7d2bb20f720d499c9b0e7a75265688e5f71b4b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e40bdb4f5cbf7a2b99407cdc461f88277d536b13434064d9b4406da02b952b93\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"message\\\":\\\"s/multus-additional-cni-plugins-sxfzb openshift-network-diagnostics/network-check-target-xd92c openshift-network-operator/iptables-alerter-4ln5h openshift-machine-config-operator/machine-config-daemon-4vc27 openshift-network-diagnostics/network-check-source-55646444c4-trplf]\\\\nI1205 17:33:48.093648 6487 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1205 17:33:48.093661 6487 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 17:33:48.093671 6487 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 17:33:48.093679 6487 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nF1205 17:33:48.093704 6487 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc 
annot\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:34:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"
containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.563198 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.563241 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.563253 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.563269 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.563279 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:05Z","lastTransitionTime":"2025-12-05T17:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.577149 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.590767 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.606434 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.621140 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.639163 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2f0ebc0d917781b93e3848439cdfeba56b89f9716773c0dbd68ca5aeb866bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.652354 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.665691 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.665750 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.665764 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.665824 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.665838 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:05Z","lastTransitionTime":"2025-12-05T17:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.667621 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"h
ostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.682367 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.697924 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e6cede-369d-4288-b388-c28aae76a50b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cd217f59fdbb11c5b8edec0f4f4a4930e22c419cc8a9048443a711641b50445\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://809381123c6003acbaac767394ebee9da31e022a661eaf3e1b601a7192664d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\
\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fdb4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.715504 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.731273 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.743834 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pgc6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f24429b-a57e-47d0-8354-87ff9d6bcee8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pgc6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.759765 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.768377 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.768437 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.768455 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.768485 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.768505 4961 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:05Z","lastTransitionTime":"2025-12-05T17:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.774639 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:05Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.863558 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:34:05 crc kubenswrapper[4961]: E1205 17:34:05.863748 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.871579 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.871616 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.871625 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.871641 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.871651 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:05Z","lastTransitionTime":"2025-12-05T17:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.974662 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.974713 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.974728 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.974750 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:05 crc kubenswrapper[4961]: I1205 17:34:05.974763 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:05Z","lastTransitionTime":"2025-12-05T17:34:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.077702 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.077753 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.077764 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.077802 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.077815 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:06Z","lastTransitionTime":"2025-12-05T17:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.181005 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.181053 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.181065 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.181083 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.181095 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:06Z","lastTransitionTime":"2025-12-05T17:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.283991 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.284054 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.284071 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.284096 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.284115 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:06Z","lastTransitionTime":"2025-12-05T17:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.387079 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.387134 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.387153 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.387178 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.387193 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:06Z","lastTransitionTime":"2025-12-05T17:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.489601 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.489651 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.489660 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.489680 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.489696 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:06Z","lastTransitionTime":"2025-12-05T17:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.531651 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5w9vd_f64daea3-7a90-4012-bd0c-31b137bd1cae/ovnkube-controller/2.log" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.532378 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5w9vd_f64daea3-7a90-4012-bd0c-31b137bd1cae/ovnkube-controller/1.log" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.535392 4961 generic.go:334] "Generic (PLEG): container finished" podID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerID="fea80df3fb98d4ea7f85ad32d7d2bb20f720d499c9b0e7a75265688e5f71b4b7" exitCode=1 Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.535449 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerDied","Data":"fea80df3fb98d4ea7f85ad32d7d2bb20f720d499c9b0e7a75265688e5f71b4b7"} Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.535506 4961 scope.go:117] "RemoveContainer" containerID="e40bdb4f5cbf7a2b99407cdc461f88277d536b13434064d9b4406da02b952b93" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.536085 4961 scope.go:117] "RemoveContainer" containerID="fea80df3fb98d4ea7f85ad32d7d2bb20f720d499c9b0e7a75265688e5f71b4b7" Dec 05 17:34:06 crc kubenswrapper[4961]: E1205 17:34:06.536279 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5w9vd_openshift-ovn-kubernetes(f64daea3-7a90-4012-bd0c-31b137bd1cae)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.551676 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:06Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.568643 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:06Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.589153 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:06Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.589461 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs\") pod \"network-metrics-daemon-pgc6p\" (UID: \"6f24429b-a57e-47d0-8354-87ff9d6bcee8\") " pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:34:06 crc kubenswrapper[4961]: E1205 17:34:06.589611 4961 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 17:34:06 crc kubenswrapper[4961]: E1205 17:34:06.589664 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs podName:6f24429b-a57e-47d0-8354-87ff9d6bcee8 nodeName:}" failed. No retries permitted until 2025-12-05 17:34:22.589648193 +0000 UTC m=+68.650798676 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs") pod "network-metrics-daemon-pgc6p" (UID: "6f24429b-a57e-47d0-8354-87ff9d6bcee8") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.591786 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.591825 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.591883 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.591908 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.591974 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:06Z","lastTransitionTime":"2025-12-05T17:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.606472 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:06Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.624657 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2f0ebc0d917781b93e3848439cdfeba56b89f9716773c0dbd68ca5aeb866bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-
api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\"
,\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T17:34:06Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.641343 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":
\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:06Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.656823 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e6cede-369d-4288-b388-c28aae76a50b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cd217f59fdbb11c5b8edec0f4f4a4930e22c419cc8a9048443a711641b50445\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://809381123c6003acbaac767394ebee9da31e022a661eaf3e1b601a7192664d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\
"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fdb4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:06Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.673238 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:06Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.685011 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T17:34:06Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.690705 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.690857 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:06 crc kubenswrapper[4961]: E1205 17:34:06.690974 4961 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:34:06 crc kubenswrapper[4961]: E1205 17:34:06.691051 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:34:38.691025571 +0000 UTC m=+84.752176044 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:34:06 crc kubenswrapper[4961]: E1205 17:34:06.691127 4961 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:34:06 crc kubenswrapper[4961]: E1205 17:34:06.691235 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:34:38.691206745 +0000 UTC m=+84.752357418 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.698810 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.698864 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.698878 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.698908 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.698925 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:06Z","lastTransitionTime":"2025-12-05T17:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.701126 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pgc6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f24429b-a57e-47d0-8354-87ff9d6bcee8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pgc6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:06Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.717417 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:06Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.734108 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:06Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.748813 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:06Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.763540 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:06Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.775553 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:06Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.792032 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:34:06 crc kubenswrapper[4961]: E1205 17:34:06.792250 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:34:38.792214734 +0000 UTC m=+84.853365227 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.792316 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.792393 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:34:06 crc kubenswrapper[4961]: E1205 17:34:06.792516 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:34:06 crc kubenswrapper[4961]: E1205 17:34:06.792544 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:34:06 crc kubenswrapper[4961]: E1205 17:34:06.792562 4961 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:34:06 crc kubenswrapper[4961]: E1205 17:34:06.792605 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2025-12-05 17:34:38.792595353 +0000 UTC m=+84.853745836 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:34:06 crc kubenswrapper[4961]: E1205 17:34:06.792516 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:34:06 crc kubenswrapper[4961]: E1205 17:34:06.792635 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:34:06 crc kubenswrapper[4961]: E1205 17:34:06.792646 4961 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:34:06 crc kubenswrapper[4961]: E1205 17:34:06.792682 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 17:34:38.792674315 +0000 UTC m=+84.853824788 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.800262 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fea80df3fb98d4ea7f85ad32d7d2bb20f720d499
c9b0e7a75265688e5f71b4b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e40bdb4f5cbf7a2b99407cdc461f88277d536b13434064d9b4406da02b952b93\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"message\\\":\\\"s/multus-additional-cni-plugins-sxfzb openshift-network-diagnostics/network-check-target-xd92c openshift-network-operator/iptables-alerter-4ln5h openshift-machine-config-operator/machine-config-daemon-4vc27 openshift-network-diagnostics/network-check-source-55646444c4-trplf]\\\\nI1205 17:33:48.093648 6487 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1205 17:33:48.093661 6487 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 17:33:48.093671 6487 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 17:33:48.093679 6487 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nF1205 17:33:48.093704 6487 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annot\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fea80df3fb98d4ea7f85ad32d7d2bb20f720d499c9b0e7a75265688e5f71b4b7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:34:06Z\\\",\\\"message\\\":\\\" successful for *v1.Pod openshift-dns/node-resolver-pbqp7 after 0 failed attempt(s)\\\\nI1205 17:34:05.786466 6660 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-pbqp7\\\\nI1205 17:34:05.785031 6660 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-qc49n\\\\nI1205 17:34:05.786494 6660 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-qc49n in node crc\\\\nI1205 17:34:05.786505 6660 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-qc49n after 0 failed attempt(s)\\\\nI1205 17:34:05.786512 6660 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-qc49n\\\\nI1205 17:34:05.786502 6660 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 17:34:05.784968 6660 
obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1205 17:34:05.786525 6660 default_network_controller.go:776] Recording success event on po\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:34:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"init
ContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:06Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.801340 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.801364 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.801374 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.801389 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.801398 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:06Z","lastTransitionTime":"2025-12-05T17:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.862824 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.862927 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.862824 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:06 crc kubenswrapper[4961]: E1205 17:34:06.863052 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:34:06 crc kubenswrapper[4961]: E1205 17:34:06.863127 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:34:06 crc kubenswrapper[4961]: E1205 17:34:06.863305 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.904000 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.904094 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.904122 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.904152 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:06 crc kubenswrapper[4961]: I1205 17:34:06.904175 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:06Z","lastTransitionTime":"2025-12-05T17:34:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.006627 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.006662 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.006671 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.006684 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.006693 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:07Z","lastTransitionTime":"2025-12-05T17:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.009362 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.021233 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.030367 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.049658 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.066878 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.096269 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba
0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fea80df3fb98d4ea7f85ad32d7d2bb20f720d499c9b0e7a75265688e5f71b4b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e40bdb4f5cbf7a2b99407cdc461f88277d536b13434064d9b4406da02b952b93\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"message\\\":\\\"s/multus-additional-cni-plugins-sxfzb openshift-network-diagnostics/network-check-target-xd92c openshift-network-operator/iptables-alerter-4ln5h openshift-machine-config-operator/machine-config-daemon-4vc27 openshift-network-diagnostics/network-check-source-55646444c4-trplf]\\\\nI1205 17:33:48.093648 6487 obj_retry.go:418] Waiting for all the *v1.Pod retry setup to complete in iterateRetryResources\\\\nI1205 17:33:48.093661 6487 obj_retry.go:303] Retry object setup: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 17:33:48.093671 6487 obj_retry.go:365] Adding new object: *v1.Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf\\\\nI1205 17:33:48.093679 6487 ovn.go:134] Ensuring zone local for Pod openshift-network-diagnostics/network-check-source-55646444c4-trplf in node crc\\\\nF1205 17:33:48.093704 6487 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc 
annot\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:47Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fea80df3fb98d4ea7f85ad32d7d2bb20f720d499c9b0e7a75265688e5f71b4b7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:34:06Z\\\",\\\"message\\\":\\\" successful for *v1.Pod openshift-dns/node-resolver-pbqp7 after 0 failed attempt(s)\\\\nI1205 17:34:05.786466 6660 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-pbqp7\\\\nI1205 17:34:05.785031 6660 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-qc49n\\\\nI1205 17:34:05.786494 6660 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-qc49n in node crc\\\\nI1205 17:34:05.786505 6660 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-qc49n after 0 failed attempt(s)\\\\nI1205 17:34:05.786512 6660 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-qc49n\\\\nI1205 17:34:05.786502 6660 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 17:34:05.784968 6660 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1205 17:34:05.786525 6660 default_network_controller.go:776] Recording success event on 
po\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:34:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.109283 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.109325 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.109334 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.109348 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.109360 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:07Z","lastTransitionTime":"2025-12-05T17:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.115873 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2f0ebc0d917781b93e3848439cdfeba56b89f9716773c0dbd68ca5aeb866bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.132222 4961 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-bgtgs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.146894 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.164064 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.180313 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.194806 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.208387 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e6cede-369d-4288-b388-c28aae76a50b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cd217f59fdbb11c5b8edec0f4f4a4930e22c419cc8a9048443a711641b50445\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://809381123c6003acbaac767394ebee9da31e022a661eaf3e1b601a7192664d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fdb4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 
17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.212060 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.212087 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.212097 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.212112 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.212123 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:07Z","lastTransitionTime":"2025-12-05T17:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.221100 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.231537 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.241459 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pgc6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f24429b-a57e-47d0-8354-87ff9d6bcee8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pgc6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.254486 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.267816 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.315129 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.315181 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.315194 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.315212 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.315226 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:07Z","lastTransitionTime":"2025-12-05T17:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.417760 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.417833 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.417846 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.417867 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.417882 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:07Z","lastTransitionTime":"2025-12-05T17:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.520908 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.520953 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.520990 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.521014 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.521026 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:07Z","lastTransitionTime":"2025-12-05T17:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.542342 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5w9vd_f64daea3-7a90-4012-bd0c-31b137bd1cae/ovnkube-controller/2.log" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.549513 4961 scope.go:117] "RemoveContainer" containerID="fea80df3fb98d4ea7f85ad32d7d2bb20f720d499c9b0e7a75265688e5f71b4b7" Dec 05 17:34:07 crc kubenswrapper[4961]: E1205 17:34:07.549710 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5w9vd_openshift-ovn-kubernetes(f64daea3-7a90-4012-bd0c-31b137bd1cae)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.563896 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pgc6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f24429b-a57e-47d0-8354-87ff9d6bcee8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pgc6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.581586 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.595189 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.609380 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\
\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.621659 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.623259 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.623335 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.623349 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.623372 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.623407 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:07Z","lastTransitionTime":"2025-12-05T17:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.637021 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.648584 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.669508 4961 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fea80df3fb98d4ea7f85ad32d7d2bb20f720d499c9b0e7a75265688e5f71b4b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fea80df3fb98d4ea7f85ad32d7d2bb20f720d499c9b0e7a75265688e5f71b4b7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:34:06Z\\\",\\\"message\\\":\\\" successful for *v1.Pod openshift-dns/node-resolver-pbqp7 after 0 failed attempt(s)\\\\nI1205 17:34:05.786466 6660 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-pbqp7\\\\nI1205 17:34:05.785031 6660 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-qc49n\\\\nI1205 17:34:05.786494 6660 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-qc49n in node crc\\\\nI1205 17:34:05.786505 6660 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-qc49n after 0 failed attempt(s)\\\\nI1205 17:34:05.786512 6660 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-qc49n\\\\nI1205 17:34:05.786502 6660 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 17:34:05.784968 6660 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1205 17:34:05.786525 6660 default_network_controller.go:776] Recording success event on po\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:34:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5w9vd_openshift-ovn-kubernetes(f64daea3-7a90-4012-bd0c-31b137bd1cae)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.683991 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.697598 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"daf1b82d-41cc-464f-b868-78a2929f63e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bbda92a962aed8a27fbb81050c0587889e326b89bca4fe276dd1754a4ee32e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51ceaa833433903bbf371fffcd7e3d0dcb3d74fe2497134dd43e18570b8e9c77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4573e4acbc0645239c97674ae61b4f2d952fcd5b0929f51b8f952b97b1de1eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://386664d93709083a63520c993930de5794f10edb977c945fafa2093e4e1e451d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://386664d93709083a63520c993930de5794f10edb977c945fafa2093e4e1e451d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.712127 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.726409 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.726476 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.726488 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.726502 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.726513 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:07Z","lastTransitionTime":"2025-12-05T17:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.727384 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.741030 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.758519 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2f0ebc0d917781b93e3848439cdfeba56b89f9716773c0dbd68ca5aeb866bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z"
Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.773200 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.784737 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.796642 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e6cede-369d-4288-b388-c28aae76a50b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cd217f59fdbb11c5b8edec0f4f4a4930e22c419cc8a9048443a711641b50445\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://809381123c6003acbaac767394ebee9da31e022a661eaf3e1b601a7192664d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fdb4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:07Z is after 2025-08-24T17:21:41Z" Dec 05 
Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.828831 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.828899 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.828918 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.828941 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.828952 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:07Z","lastTransitionTime":"2025-12-05T17:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.863509 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p"
Dec 05 17:34:07 crc kubenswrapper[4961]: E1205 17:34:07.863681 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8"
Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.931766 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.931834 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.931847 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.931867 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:07 crc kubenswrapper[4961]: I1205 17:34:07.931879 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:07Z","lastTransitionTime":"2025-12-05T17:34:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.034715 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.034842 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.034872 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.034907 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.034934 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:08Z","lastTransitionTime":"2025-12-05T17:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.137961 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.138432 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.138446 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.138463 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.138475 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:08Z","lastTransitionTime":"2025-12-05T17:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.241984 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.242082 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.242107 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.242144 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.242168 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:08Z","lastTransitionTime":"2025-12-05T17:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.344528 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.344564 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.344575 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.344589 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.344599 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:08Z","lastTransitionTime":"2025-12-05T17:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.447297 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.447348 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.447357 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.447373 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.447383 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:08Z","lastTransitionTime":"2025-12-05T17:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.550938 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.551062 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.551086 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.551112 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.551130 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:08Z","lastTransitionTime":"2025-12-05T17:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.653766 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.653834 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.653847 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.653864 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.653876 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:08Z","lastTransitionTime":"2025-12-05T17:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.756814 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.756857 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.756867 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.756881 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.756891 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:08Z","lastTransitionTime":"2025-12-05T17:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.861704 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.861772 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.861833 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.861865 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.861889 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:08Z","lastTransitionTime":"2025-12-05T17:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.862757 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.862766 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.862766 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 17:34:08 crc kubenswrapper[4961]: E1205 17:34:08.862900 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 17:34:08 crc kubenswrapper[4961]: E1205 17:34:08.862955 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Dec 05 17:34:08 crc kubenswrapper[4961]: E1205 17:34:08.863132 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.964986 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.965061 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.965075 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.965099 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:08 crc kubenswrapper[4961]: I1205 17:34:08.965115 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:08Z","lastTransitionTime":"2025-12-05T17:34:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.068068 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.068121 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.068134 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.068152 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.068164 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:09Z","lastTransitionTime":"2025-12-05T17:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.171194 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.171262 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.171276 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.171299 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.171316 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:09Z","lastTransitionTime":"2025-12-05T17:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.274339 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.274405 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.274424 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.274450 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.274504 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:09Z","lastTransitionTime":"2025-12-05T17:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.377413 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.377459 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.377474 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.377492 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.377505 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:09Z","lastTransitionTime":"2025-12-05T17:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.481028 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.481089 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.481105 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.481125 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.481140 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:09Z","lastTransitionTime":"2025-12-05T17:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.584026 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.584087 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.584095 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.584111 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.584125 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:09Z","lastTransitionTime":"2025-12-05T17:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.687409 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.687474 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.687489 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.687511 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.687527 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:09Z","lastTransitionTime":"2025-12-05T17:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.790644 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.790726 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.790738 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.790757 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.790769 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:09Z","lastTransitionTime":"2025-12-05T17:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.863015 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p"
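The recurring KubeletNotReady condition in this stream names its own cause: CRI-O reports NetworkReady=false because nothing has yet written a CNI configuration into /etc/kubernetes/cni/net.d/, and the pods that would write one (multus, ovnkube) are the same ones whose status updates the expired webhook certificate is rejecting. A short sketch of how one might poll that directory until a network config appears, assuming only the Python standard library; the path is taken verbatim from the log, everything else is illustrative:

    import json
    import pathlib
    import time

    # Directory named in the kubelet's NetworkReady=false message.
    CNI_DIR = pathlib.Path("/etc/kubernetes/cni/net.d")

    while True:
        confs = []
        if CNI_DIR.is_dir():
            confs = sorted(p for p in CNI_DIR.iterdir()
                           if p.suffix in (".conf", ".conflist"))
        if confs:
            # libcni consumers such as CRI-O load the lexically first valid
            # file, so that is the one worth inspecting.
            data = json.loads(confs[0].read_text())
            plugins = data.get("plugins", [data])  # .conflist vs. single .conf
            print(confs[0].name, "->", [p.get("type") for p in plugins])
            break
        print("no CNI configuration file yet; network plugin still not ready")
        time.sleep(5)

Once a config lands there, the NodeNotReady/KubeletNotReady loop that continues below should clear on the kubelet's next sync.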
pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.893497 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.893561 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.893583 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.893610 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.893629 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:09Z","lastTransitionTime":"2025-12-05T17:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.997014 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.997105 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.997127 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.997157 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:09 crc kubenswrapper[4961]: I1205 17:34:09.997177 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:09Z","lastTransitionTime":"2025-12-05T17:34:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.100360 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.100406 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.100417 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.100433 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.100449 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:10Z","lastTransitionTime":"2025-12-05T17:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.204079 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.204147 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.204160 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.204182 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.204197 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:10Z","lastTransitionTime":"2025-12-05T17:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.307121 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.307199 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.307278 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.307361 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.307392 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:10Z","lastTransitionTime":"2025-12-05T17:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.410697 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.410810 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.410838 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.410873 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.410900 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:10Z","lastTransitionTime":"2025-12-05T17:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.513336 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.513406 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.513430 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.513461 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.513485 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:10Z","lastTransitionTime":"2025-12-05T17:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.617039 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.617105 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.617117 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.617140 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.617154 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:10Z","lastTransitionTime":"2025-12-05T17:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.720449 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.720514 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.720526 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.720549 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.720567 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:10Z","lastTransitionTime":"2025-12-05T17:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.823416 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.823469 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.823479 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.823500 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.823512 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:10Z","lastTransitionTime":"2025-12-05T17:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.863524 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.863568 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.863729 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:34:10 crc kubenswrapper[4961]: E1205 17:34:10.863917 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:34:10 crc kubenswrapper[4961]: E1205 17:34:10.864175 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:34:10 crc kubenswrapper[4961]: E1205 17:34:10.864229 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.926087 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.926127 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.926139 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.926159 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:10 crc kubenswrapper[4961]: I1205 17:34:10.926173 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:10Z","lastTransitionTime":"2025-12-05T17:34:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.028938 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.028991 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.029004 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.029022 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.029034 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:11Z","lastTransitionTime":"2025-12-05T17:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.131980 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.132042 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.132057 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.132076 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.132087 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:11Z","lastTransitionTime":"2025-12-05T17:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.234854 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.234905 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.234918 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.234936 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.234949 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:11Z","lastTransitionTime":"2025-12-05T17:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.338195 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.338258 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.338272 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.338293 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.338313 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:11Z","lastTransitionTime":"2025-12-05T17:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.441246 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.441297 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.441310 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.441327 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.441340 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:11Z","lastTransitionTime":"2025-12-05T17:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.544155 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.544199 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.544212 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.544231 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.544243 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:11Z","lastTransitionTime":"2025-12-05T17:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.647437 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.647485 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.647498 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.647522 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.647536 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:11Z","lastTransitionTime":"2025-12-05T17:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.750698 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.750731 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.750741 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.750760 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.750803 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:11Z","lastTransitionTime":"2025-12-05T17:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.850638 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.850740 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.850760 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.850814 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.850841 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:11Z","lastTransitionTime":"2025-12-05T17:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.863659 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:34:11 crc kubenswrapper[4961]: E1205 17:34:11.865151 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:34:11 crc kubenswrapper[4961]: E1205 17:34:11.874343 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.880324 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.880572 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.880585 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.880606 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.880617 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:11Z","lastTransitionTime":"2025-12-05T17:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:11 crc kubenswrapper[4961]: E1205 17:34:11.902276 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.906700 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.906811 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.906828 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.906851 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.906865 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:11Z","lastTransitionTime":"2025-12-05T17:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:11 crc kubenswrapper[4961]: E1205 17:34:11.924004 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.928711 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.928753 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.928765 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.928805 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.928818 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:11Z","lastTransitionTime":"2025-12-05T17:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:11 crc kubenswrapper[4961]: E1205 17:34:11.945104 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.950994 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.951074 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.951095 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.951124 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.951143 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:11Z","lastTransitionTime":"2025-12-05T17:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:11 crc kubenswrapper[4961]: E1205 17:34:11.967914 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[… image list byte-identical to the previous patch attempt above, elided …],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:11Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:11 crc kubenswrapper[4961]: E1205 17:34:11.968099 4961 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.970154 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasSufficientMemory" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.970197 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.970209 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.970230 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:11 crc kubenswrapper[4961]: I1205 17:34:11.970243 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:11Z","lastTransitionTime":"2025-12-05T17:34:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.073080 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.073190 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.073204 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.073222 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.073233 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:12Z","lastTransitionTime":"2025-12-05T17:34:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.176381 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.176414 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.176423 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.176440 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.176448 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:12Z","lastTransitionTime":"2025-12-05T17:34:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.279757 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.279837 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.279851 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.279873 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.279891 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:12Z","lastTransitionTime":"2025-12-05T17:34:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.382687 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.382727 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.382735 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.382752 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.382762 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:12Z","lastTransitionTime":"2025-12-05T17:34:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.485726 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.485805 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.485818 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.485836 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.485849 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:12Z","lastTransitionTime":"2025-12-05T17:34:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.588163 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.588201 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.588209 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.588223 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.588231 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:12Z","lastTransitionTime":"2025-12-05T17:34:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.692039 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.692105 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.692121 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.692141 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.692152 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:12Z","lastTransitionTime":"2025-12-05T17:34:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.795411 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.795474 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.795493 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.795520 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.795539 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:12Z","lastTransitionTime":"2025-12-05T17:34:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.863089 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.863150 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:34:12 crc kubenswrapper[4961]: E1205 17:34:12.863278 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:34:12 crc kubenswrapper[4961]: E1205 17:34:12.863482 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.863313 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:34:12 crc kubenswrapper[4961]: E1205 17:34:12.863939 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.898667 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.898742 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.898762 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.899119 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:12 crc kubenswrapper[4961]: I1205 17:34:12.899398 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:12Z","lastTransitionTime":"2025-12-05T17:34:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.002069 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.002138 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.002169 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.002202 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.002228 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:13Z","lastTransitionTime":"2025-12-05T17:34:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.105027 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.105117 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.105145 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.105177 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.105202 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:13Z","lastTransitionTime":"2025-12-05T17:34:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.208246 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.208313 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.208331 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.208357 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.208375 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:13Z","lastTransitionTime":"2025-12-05T17:34:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.310931 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.311259 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.311331 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.311424 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.311505 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:13Z","lastTransitionTime":"2025-12-05T17:34:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.415198 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.415282 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.415304 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.415335 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.415358 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:13Z","lastTransitionTime":"2025-12-05T17:34:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.519332 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.519438 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.519485 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.519525 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.519554 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:13Z","lastTransitionTime":"2025-12-05T17:34:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.623103 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.623188 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.623220 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.623259 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.623291 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:13Z","lastTransitionTime":"2025-12-05T17:34:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.727131 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.727699 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.727884 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.728037 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.728176 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:13Z","lastTransitionTime":"2025-12-05T17:34:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.831702 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.831766 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.831814 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.831842 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.831863 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:13Z","lastTransitionTime":"2025-12-05T17:34:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.863573 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:34:13 crc kubenswrapper[4961]: E1205 17:34:13.863904 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.935459 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.935528 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.935563 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.935593 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:13 crc kubenswrapper[4961]: I1205 17:34:13.935615 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:13Z","lastTransitionTime":"2025-12-05T17:34:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.038918 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.038985 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.038998 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.039018 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.039030 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:14Z","lastTransitionTime":"2025-12-05T17:34:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.142148 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.142193 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.142203 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.142220 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.142232 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:14Z","lastTransitionTime":"2025-12-05T17:34:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.244976 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.245017 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.245030 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.245048 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.245061 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:14Z","lastTransitionTime":"2025-12-05T17:34:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.348486 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.348541 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.348554 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.348573 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.348586 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:14Z","lastTransitionTime":"2025-12-05T17:34:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.451830 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.451875 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.451888 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.451908 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.451920 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:14Z","lastTransitionTime":"2025-12-05T17:34:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.554990 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.555049 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.555063 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.555086 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.555100 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:14Z","lastTransitionTime":"2025-12-05T17:34:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.658272 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.658327 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.658340 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.658359 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.658374 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:14Z","lastTransitionTime":"2025-12-05T17:34:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.761328 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.761387 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.761403 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.761429 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.761443 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:14Z","lastTransitionTime":"2025-12-05T17:34:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.862496 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.862523 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.862592 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:34:14 crc kubenswrapper[4961]: E1205 17:34:14.862633 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:34:14 crc kubenswrapper[4961]: E1205 17:34:14.862860 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:34:14 crc kubenswrapper[4961]: E1205 17:34:14.863052 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.863911 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.863964 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.863983 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.864006 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.864023 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:14Z","lastTransitionTime":"2025-12-05T17:34:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.881859 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"
quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:14Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.898301 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:14Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.911640 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:14Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.928655 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"daf1b82d-41cc-464f-b868-78a2929f63e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bbda92a962aed8a27fbb81050c0587889e326b89bca4fe276dd1754a4ee32e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51ceaa833433903bbf371fffcd7e3d0dcb3d74fe2497134dd43e18570b8e9c77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4573e4acbc0645239c97674ae61b4f2d952fcd5b0929f51b8f952b97b1de1eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://386664d93709083a63520c993930de5794f10edb977c945fafa2093e4e1e451d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://386664d93709083a63520c993930de5794f10edb977c945fafa2093e4e1e451d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:14Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.947038 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:14Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.964864 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:14Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.966218 4961 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.966256 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.966268 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.966287 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.966300 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:14Z","lastTransitionTime":"2025-12-05T17:34:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:14 crc kubenswrapper[4961]: I1205 17:34:14.993404 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fea80df3fb98d4ea7f85ad32d7d2bb20f720d499
c9b0e7a75265688e5f71b4b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fea80df3fb98d4ea7f85ad32d7d2bb20f720d499c9b0e7a75265688e5f71b4b7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:34:06Z\\\",\\\"message\\\":\\\" successful for *v1.Pod openshift-dns/node-resolver-pbqp7 after 0 failed attempt(s)\\\\nI1205 17:34:05.786466 6660 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-pbqp7\\\\nI1205 17:34:05.785031 6660 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-qc49n\\\\nI1205 17:34:05.786494 6660 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-qc49n in node crc\\\\nI1205 17:34:05.786505 6660 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-qc49n after 0 failed attempt(s)\\\\nI1205 17:34:05.786512 6660 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-qc49n\\\\nI1205 17:34:05.786502 6660 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 17:34:05.784968 6660 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1205 17:34:05.786525 6660 default_network_controller.go:776] Recording success event on po\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:34:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5w9vd_openshift-ovn-kubernetes(f64daea3-7a90-4012-bd0c-31b137bd1cae)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:14Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:15 crc kubenswrapper[4961]: I1205 17:34:15.008708 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-
cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:15Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:15 crc kubenswrapper[4961]: I1205 17:34:15.027060 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:15Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:15 crc kubenswrapper[4961]: I1205 17:34:15.043948 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:15Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:15 crc kubenswrapper[4961]: I1205 17:34:15.060342 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:15Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:15 crc kubenswrapper[4961]: I1205 17:34:15.068873 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:15 crc kubenswrapper[4961]: I1205 17:34:15.069197 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:15 crc kubenswrapper[4961]: I1205 17:34:15.069285 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:15 crc kubenswrapper[4961]: I1205 17:34:15.069378 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:15 crc kubenswrapper[4961]: I1205 17:34:15.069462 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:15Z","lastTransitionTime":"2025-12-05T17:34:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:15 crc kubenswrapper[4961]: I1205 17:34:15.075558 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:15Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:15 crc kubenswrapper[4961]: I1205 17:34:15.094515 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2f0ebc0d917781b93e3848439cdfeba56b89f9716773c0dbd68ca5aeb866bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:15Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:15 crc kubenswrapper[4961]: I1205 17:34:15.108862 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e6cede-369d-4288-b388-c28aae76a50b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cd217f59fdbb11c5b8edec0f4f4a4930e22c419cc8a9048443a711641b50445\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://809381123c6003acbaac767394ebee9da31e022a661eaf3e1b601a7192664d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fdb4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:15Z is after 2025-08-24T17:21:41Z" Dec 05 
17:34:15 crc kubenswrapper[4961]: I1205 17:34:15.125581 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:15Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:15 crc kubenswrapper[4961]: I1205 17:34:15.140213 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:15Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:15 crc kubenswrapper[4961]: I1205 17:34:15.155882 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pgc6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f24429b-a57e-47d0-8354-87ff9d6bcee8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pgc6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:15Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:15 crc kubenswrapper[4961]: I1205 17:34:15.172067 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:15 crc kubenswrapper[4961]: I1205 17:34:15.172440 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:15 crc kubenswrapper[4961]: I1205 17:34:15.172572 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:15 crc kubenswrapper[4961]: I1205 17:34:15.172685 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:15 crc kubenswrapper[4961]: I1205 17:34:15.172802 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:15Z","lastTransitionTime":"2025-12-05T17:34:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Dec 05 17:34:15 crc kubenswrapper[4961]: I1205 17:34:15.275501 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Dec 05 17:34:15 crc kubenswrapper[4961]: I1205 17:34:15.275567 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Dec 05 17:34:15 crc kubenswrapper[4961]: I1205 17:34:15.275577 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Dec 05 17:34:15 crc kubenswrapper[4961]: I1205 17:34:15.275595 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Dec 05 17:34:15 crc kubenswrapper[4961]: I1205 17:34:15.275606 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:15Z","lastTransitionTime":"2025-12-05T17:34:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[… the same five-record block repeats, identical except for timestamps, at 17:34:15.378, 17:34:15.481, 17:34:15.584, 17:34:15.687 and 17:34:15.791 …]
Dec 05 17:34:15 crc kubenswrapper[4961]: I1205 17:34:15.863380 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p"
Dec 05 17:34:15 crc kubenswrapper[4961]: E1205 17:34:15.863549 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8"
[… the five-record block repeats at 17:34:15.893, 17:34:15.998, 17:34:16.100, 17:34:16.204, 17:34:16.307, 17:34:16.410, 17:34:16.513, 17:34:16.616, 17:34:16.718 and 17:34:16.821 …]
Dec 05 17:34:16 crc kubenswrapper[4961]: I1205 17:34:16.862853 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 17:34:16 crc kubenswrapper[4961]: I1205 17:34:16.862908 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 17:34:16 crc kubenswrapper[4961]: E1205 17:34:16.863026 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
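The NetworkReady=false records above all trace back to one test: the container runtime reports the network as not ready until at least one CNI configuration file appears in /etc/kubernetes/cni/net.d/. A minimal Go sketch of that directory check follows; it is an illustration, not the kubelet's or CRI-O's actual code, and the networkReady helper and its extension list are assumptions made for the example.

// A sketch of the readiness test behind "no CNI configuration file in
// /etc/kubernetes/cni/net.d/": scan the conf directory and report not-ready
// until a network config shows up. Illustrative only.
package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

// networkReady (hypothetical helper) returns nil once confDir holds at least
// one CNI config; the extensions mirror the ones CNI conventionally loads.
func networkReady(confDir string) error {
	entries, err := os.ReadDir(confDir)
	if err != nil {
		return fmt.Errorf("reading %s: %w", confDir, err)
	}
	for _, e := range entries {
		switch strings.ToLower(filepath.Ext(e.Name())) {
		case ".conf", ".conflist", ".json":
			return nil // a network config exists, so the CNI plugin can be consulted
		}
	}
	return fmt.Errorf("no CNI configuration file in %s. Has your network provider started?", confDir)
}

func main() {
	if err := networkReady("/etc/kubernetes/cni/net.d"); err != nil {
		fmt.Println("NetworkReady=false:", err)
	}
}

Once the network provider (here OVN-Kubernetes, per the ovnkube pods above) writes its config file into that directory, the check passes and the "Node became not ready" loop stops.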
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:34:16 crc kubenswrapper[4961]: E1205 17:34:16.863123 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:34:16 crc kubenswrapper[4961]: I1205 17:34:16.863405 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:34:16 crc kubenswrapper[4961]: E1205 17:34:16.863480 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:34:16 crc kubenswrapper[4961]: I1205 17:34:16.924213 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:16 crc kubenswrapper[4961]: I1205 17:34:16.924251 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:16 crc kubenswrapper[4961]: I1205 17:34:16.924261 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:16 crc kubenswrapper[4961]: I1205 17:34:16.924278 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:16 crc kubenswrapper[4961]: I1205 17:34:16.924289 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:16Z","lastTransitionTime":"2025-12-05T17:34:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.028195 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.028269 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.028284 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.028301 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.028312 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:17Z","lastTransitionTime":"2025-12-05T17:34:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.131590 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.131651 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.131682 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.131704 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.131719 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:17Z","lastTransitionTime":"2025-12-05T17:34:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.234131 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.234184 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.234197 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.234216 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.234229 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:17Z","lastTransitionTime":"2025-12-05T17:34:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.336769 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.336833 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.336846 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.336864 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.336875 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:17Z","lastTransitionTime":"2025-12-05T17:34:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.439838 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.439886 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.439899 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.439920 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.439933 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:17Z","lastTransitionTime":"2025-12-05T17:34:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.542325 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.542765 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.542896 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.542992 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.543074 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:17Z","lastTransitionTime":"2025-12-05T17:34:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.645882 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.645930 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.645944 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.645963 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.645976 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:17Z","lastTransitionTime":"2025-12-05T17:34:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.748099 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.748161 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.748179 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.748210 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.748227 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:17Z","lastTransitionTime":"2025-12-05T17:34:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.851307 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.851352 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.851371 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.851388 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.851403 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:17Z","lastTransitionTime":"2025-12-05T17:34:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.862597 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:34:17 crc kubenswrapper[4961]: E1205 17:34:17.862812 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.953919 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.953981 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.953991 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.954008 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:17 crc kubenswrapper[4961]: I1205 17:34:17.954020 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:17Z","lastTransitionTime":"2025-12-05T17:34:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.058759 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.058853 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.058876 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.058904 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.058923 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:18Z","lastTransitionTime":"2025-12-05T17:34:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.161737 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.161812 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.161827 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.161845 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.161858 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:18Z","lastTransitionTime":"2025-12-05T17:34:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.265121 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.265193 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.265204 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.265223 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.265234 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:18Z","lastTransitionTime":"2025-12-05T17:34:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.367768 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.367830 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.367839 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.367855 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.367866 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:18Z","lastTransitionTime":"2025-12-05T17:34:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.470008 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.470090 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.470103 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.470120 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.470133 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:18Z","lastTransitionTime":"2025-12-05T17:34:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.572523 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.572571 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.572583 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.572602 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.572616 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:18Z","lastTransitionTime":"2025-12-05T17:34:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.675500 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.675544 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.675558 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.675578 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.675591 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:18Z","lastTransitionTime":"2025-12-05T17:34:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.778382 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.778434 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.778447 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.778466 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.778480 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:18Z","lastTransitionTime":"2025-12-05T17:34:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.863351 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:34:18 crc kubenswrapper[4961]: E1205 17:34:18.863513 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.863720 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:34:18 crc kubenswrapper[4961]: E1205 17:34:18.863829 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.864142 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:18 crc kubenswrapper[4961]: E1205 17:34:18.864400 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.881113 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.881166 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.881178 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.881195 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.881207 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:18Z","lastTransitionTime":"2025-12-05T17:34:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.984204 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.984257 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.984265 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.984286 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:18 crc kubenswrapper[4961]: I1205 17:34:18.984296 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:18Z","lastTransitionTime":"2025-12-05T17:34:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.087151 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.087199 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.087214 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.087235 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.087251 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:19Z","lastTransitionTime":"2025-12-05T17:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.190186 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.190241 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.190252 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.190273 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.190284 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:19Z","lastTransitionTime":"2025-12-05T17:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.294070 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.294117 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.294127 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.294144 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.294155 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:19Z","lastTransitionTime":"2025-12-05T17:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.397218 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.397280 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.397294 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.397312 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.397324 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:19Z","lastTransitionTime":"2025-12-05T17:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.500095 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.500161 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.500179 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.500218 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.500234 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:19Z","lastTransitionTime":"2025-12-05T17:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.603349 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.603424 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.603449 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.603479 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.603525 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:19Z","lastTransitionTime":"2025-12-05T17:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.706273 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.706323 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.706346 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.706366 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.706378 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:19Z","lastTransitionTime":"2025-12-05T17:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.809368 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.809408 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.809420 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.809439 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.809451 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:19Z","lastTransitionTime":"2025-12-05T17:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.863567 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:34:19 crc kubenswrapper[4961]: E1205 17:34:19.863754 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.911411 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.911454 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.911464 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.911481 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:19 crc kubenswrapper[4961]: I1205 17:34:19.911491 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:19Z","lastTransitionTime":"2025-12-05T17:34:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.014462 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.014518 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.014531 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.014554 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.014580 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:20Z","lastTransitionTime":"2025-12-05T17:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.117192 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.117246 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.117257 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.117278 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.117293 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:20Z","lastTransitionTime":"2025-12-05T17:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.220398 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.220489 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.220502 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.220525 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.220540 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:20Z","lastTransitionTime":"2025-12-05T17:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.323056 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.323100 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.323115 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.323134 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.323147 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:20Z","lastTransitionTime":"2025-12-05T17:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.425973 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.426056 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.426075 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.426102 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.426120 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:20Z","lastTransitionTime":"2025-12-05T17:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.528682 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.528725 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.528741 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.528758 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.528770 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:20Z","lastTransitionTime":"2025-12-05T17:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.630903 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.630947 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.630961 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.630978 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.631003 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:20Z","lastTransitionTime":"2025-12-05T17:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.733851 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.733913 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.733926 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.733944 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.733958 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:20Z","lastTransitionTime":"2025-12-05T17:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.836709 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.836765 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.836793 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.836815 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.836827 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:20Z","lastTransitionTime":"2025-12-05T17:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.862893 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:34:20 crc kubenswrapper[4961]: E1205 17:34:20.863060 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.863161 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:20 crc kubenswrapper[4961]: E1205 17:34:20.863338 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.863349 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:34:20 crc kubenswrapper[4961]: E1205 17:34:20.863576 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.938768 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.938839 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.938885 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.938910 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:20 crc kubenswrapper[4961]: I1205 17:34:20.938920 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:20Z","lastTransitionTime":"2025-12-05T17:34:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.041347 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.041388 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.041399 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.041416 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.041426 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:21Z","lastTransitionTime":"2025-12-05T17:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.144358 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.144425 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.144441 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.144466 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.144480 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:21Z","lastTransitionTime":"2025-12-05T17:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.247410 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.247460 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.247490 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.247512 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.247526 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:21Z","lastTransitionTime":"2025-12-05T17:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.350946 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.351000 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.351009 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.351030 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.351043 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:21Z","lastTransitionTime":"2025-12-05T17:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.454425 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.454495 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.454512 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.454535 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.454552 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:21Z","lastTransitionTime":"2025-12-05T17:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.557153 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.557205 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.557221 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.557241 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.557254 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:21Z","lastTransitionTime":"2025-12-05T17:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.659434 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.659468 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.659477 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.659493 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.659502 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:21Z","lastTransitionTime":"2025-12-05T17:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.762720 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.762765 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.762806 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.762827 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.762839 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:21Z","lastTransitionTime":"2025-12-05T17:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.862996 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:34:21 crc kubenswrapper[4961]: E1205 17:34:21.863154 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.864005 4961 scope.go:117] "RemoveContainer" containerID="fea80df3fb98d4ea7f85ad32d7d2bb20f720d499c9b0e7a75265688e5f71b4b7" Dec 05 17:34:21 crc kubenswrapper[4961]: E1205 17:34:21.864185 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5w9vd_openshift-ovn-kubernetes(f64daea3-7a90-4012-bd0c-31b137bd1cae)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.906155 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.906202 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.906216 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.906234 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:21 crc kubenswrapper[4961]: I1205 17:34:21.906346 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:21Z","lastTransitionTime":"2025-12-05T17:34:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.009153 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.009200 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.009214 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.009235 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.009249 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:22Z","lastTransitionTime":"2025-12-05T17:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.111944 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.112000 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.112014 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.112034 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.112051 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:22Z","lastTransitionTime":"2025-12-05T17:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.215199 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.215260 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.215271 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.215285 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.215296 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:22Z","lastTransitionTime":"2025-12-05T17:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.265233 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.265275 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.265285 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.265308 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.265320 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:22Z","lastTransitionTime":"2025-12-05T17:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:22 crc kubenswrapper[4961]: E1205 17:34:22.283086 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:22Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.288063 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.288121 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.288134 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.288154 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.288167 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:22Z","lastTransitionTime":"2025-12-05T17:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:22 crc kubenswrapper[4961]: E1205 17:34:22.302422 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:22Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.307140 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.307201 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.307219 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.307242 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.307261 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:22Z","lastTransitionTime":"2025-12-05T17:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:22 crc kubenswrapper[4961]: E1205 17:34:22.324913 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:22Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.329348 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.329385 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.329396 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.329412 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.329422 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:22Z","lastTransitionTime":"2025-12-05T17:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:22 crc kubenswrapper[4961]: E1205 17:34:22.343554 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:22Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.348933 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.348969 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.348979 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.348994 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.349006 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:22Z","lastTransitionTime":"2025-12-05T17:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:22 crc kubenswrapper[4961]: E1205 17:34:22.363381 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:22Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:22 crc kubenswrapper[4961]: E1205 17:34:22.363597 4961 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.365516 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.365557 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.365570 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.365588 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.365601 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:22Z","lastTransitionTime":"2025-12-05T17:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.468482 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.468525 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.468535 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.468552 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.468566 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:22Z","lastTransitionTime":"2025-12-05T17:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.571342 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.571414 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.571431 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.571455 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.571472 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:22Z","lastTransitionTime":"2025-12-05T17:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.667564 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs\") pod \"network-metrics-daemon-pgc6p\" (UID: \"6f24429b-a57e-47d0-8354-87ff9d6bcee8\") " pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:34:22 crc kubenswrapper[4961]: E1205 17:34:22.667832 4961 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 17:34:22 crc kubenswrapper[4961]: E1205 17:34:22.667965 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs podName:6f24429b-a57e-47d0-8354-87ff9d6bcee8 nodeName:}" failed. No retries permitted until 2025-12-05 17:34:54.66793488 +0000 UTC m=+100.729085393 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs") pod "network-metrics-daemon-pgc6p" (UID: "6f24429b-a57e-47d0-8354-87ff9d6bcee8") : object "openshift-multus"/"metrics-daemon-secret" not registered Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.673955 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.674165 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.674257 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.674354 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.674449 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:22Z","lastTransitionTime":"2025-12-05T17:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.777645 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.777818 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.777841 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.777907 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.777929 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:22Z","lastTransitionTime":"2025-12-05T17:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.863389 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.863450 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.863475 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:22 crc kubenswrapper[4961]: E1205 17:34:22.864377 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:34:22 crc kubenswrapper[4961]: E1205 17:34:22.864484 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:34:22 crc kubenswrapper[4961]: E1205 17:34:22.864551 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.881528 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.881575 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.881587 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.881605 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.881618 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:22Z","lastTransitionTime":"2025-12-05T17:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.983925 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.983978 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.984000 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.984024 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:22 crc kubenswrapper[4961]: I1205 17:34:22.984040 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:22Z","lastTransitionTime":"2025-12-05T17:34:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.086807 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.086855 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.086863 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.086879 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.086889 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:23Z","lastTransitionTime":"2025-12-05T17:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.189529 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.189639 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.189662 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.189695 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.189837 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:23Z","lastTransitionTime":"2025-12-05T17:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.293084 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.293137 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.293146 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.293161 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.293188 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:23Z","lastTransitionTime":"2025-12-05T17:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.395823 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.395871 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.395881 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.395899 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.395908 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:23Z","lastTransitionTime":"2025-12-05T17:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.515185 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.515235 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.515248 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.515269 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.515281 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:23Z","lastTransitionTime":"2025-12-05T17:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.617900 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.617946 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.617956 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.617973 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.617984 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:23Z","lastTransitionTime":"2025-12-05T17:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.721028 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.721060 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.721069 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.721083 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.721092 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:23Z","lastTransitionTime":"2025-12-05T17:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.823227 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.823260 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.823270 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.823283 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.823292 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:23Z","lastTransitionTime":"2025-12-05T17:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.862522 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:34:23 crc kubenswrapper[4961]: E1205 17:34:23.862698 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.925819 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.925883 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.925904 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.925932 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:23 crc kubenswrapper[4961]: I1205 17:34:23.925949 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:23Z","lastTransitionTime":"2025-12-05T17:34:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.028315 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.028806 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.028976 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.029186 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.029413 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:24Z","lastTransitionTime":"2025-12-05T17:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.132511 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.132572 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.132584 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.132606 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.132620 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:24Z","lastTransitionTime":"2025-12-05T17:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.235630 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.235696 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.235710 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.235730 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.235742 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:24Z","lastTransitionTime":"2025-12-05T17:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.338506 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.338547 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.338560 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.338577 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.338593 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:24Z","lastTransitionTime":"2025-12-05T17:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.441751 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.441960 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.441975 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.441995 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.442005 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:24Z","lastTransitionTime":"2025-12-05T17:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.544087 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.544934 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.545102 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.545133 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.545149 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:24Z","lastTransitionTime":"2025-12-05T17:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.609770 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bgtgs_26618630-1782-4ae8-af12-6f913fbddf5b/kube-multus/0.log" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.609893 4961 generic.go:334] "Generic (PLEG): container finished" podID="26618630-1782-4ae8-af12-6f913fbddf5b" containerID="99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4" exitCode=1 Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.609944 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bgtgs" event={"ID":"26618630-1782-4ae8-af12-6f913fbddf5b","Type":"ContainerDied","Data":"99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4"} Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.610483 4961 scope.go:117] "RemoveContainer" containerID="99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.630937 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":
{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] 
Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:24Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.649124 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:24Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.650670 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.650698 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.650712 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.650729 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.650740 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:24Z","lastTransitionTime":"2025-12-05T17:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.665846 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:24Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.684921 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"daf1b82d-41cc-464f-b868-78a2929f63e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bbda92a962aed8a27fbb81050c0587889e326b89bca4fe276dd1754a4ee32e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51ceaa833433903bbf371fffcd7e3d0dcb3d74fe2497134dd43e18570b8e9c77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4573e4acbc0645239c97674ae61b4f2d952fcd5b0929f51b8f952b97b1de1eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://386664d93709083a63520c993930de5794f10edb977c945fafa2093e4e1e451d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://386664d93709083a63520c993930de5794f10edb977c945fafa2093e4e1e451d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:24Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.706926 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:24Z is after 
2025-08-24T17:21:41Z" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.719853 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:24Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.741963 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-ac
cess-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@
sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fea80df3fb98d4ea7f85ad32d7d2bb20f720d499c9b0e7a75265688e5f71b4b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fea80df3fb98d4ea7f85ad32d7d2bb20f720d499c9b0e7a75265688e5f71b4b7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:34:06Z\\\",\\\"message\\\":\\\" successful for *v1.Pod openshift-dns/node-resolver-pbqp7 after 0 failed attempt(s)\\\\nI1205 17:34:05.786466 6660 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-pbqp7\\\\nI1205 17:34:05.785031 6660 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-qc49n\\\\nI1205 17:34:05.786494 6660 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-qc49n in node crc\\\\nI1205 17:34:05.786505 6660 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-qc49n after 0 failed attempt(s)\\\\nI1205 17:34:05.786512 6660 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-qc49n\\\\nI1205 17:34:05.786502 6660 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 17:34:05.784968 6660 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1205 17:34:05.786525 6660 default_network_controller.go:776] Recording success event on 
po\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:34:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-5w9vd_openshift-ovn-kubernetes(f64daea3-7a90-4012-bd0c-31b137bd1cae)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:24Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.754188 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.754246 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.754298 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.754316 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.754328 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:24Z","lastTransitionTime":"2025-12-05T17:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.761012 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:24Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.780338 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:24Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.794061 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:24Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.809430 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2f0ebc0d917781b93e3848439cdfeba56b89f9716773c0dbd68ca5aeb866bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:24Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.825889 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:34:23Z\\\",\\\"message\\\":\\\"2025-12-05T17:33:38+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_4205d582-be47-42de-92da-c3b1c4e06d40\\\\n2025-12-05T17:33:38+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_4205d582-be47-42de-92da-c3b1c4e06d40 to /host/opt/cni/bin/\\\\n2025-12-05T17:33:38Z [verbose] multus-daemon started\\\\n2025-12-05T17:33:38Z [verbose] Readiness Indicator file check\\\\n2025-12-05T17:34:23Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:24Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.839217 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:24Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.852474 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e6cede-369d-4288-b388-c28aae76a50b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cd217f59fdbb11c5b8edec0f4f4a4930e22c419cc8a9048443a711641b50445\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://809381123c6003acbaac767394ebee9da31e022a661eaf3e1b601a7192664d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fdb4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:24Z is after 2025-08-24T17:21:41Z" Dec 05 
17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.862462 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.862525 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.862540 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.862559 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.862571 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:24Z","lastTransitionTime":"2025-12-05T17:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.863159 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:34:24 crc kubenswrapper[4961]: E1205 17:34:24.863263 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.863428 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:24 crc kubenswrapper[4961]: E1205 17:34:24.863489 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.863853 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:34:24 crc kubenswrapper[4961]: E1205 17:34:24.863920 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.873472 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:24Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.890110 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:24Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.905423 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pgc6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f24429b-a57e-47d0-8354-87ff9d6bcee8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pgc6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:24Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.923576 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:24Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.937226 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\
\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:24Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.952960 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pgc6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f24429b-a57e-47d0-8354-87ff9d6bcee8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pgc6p\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:24Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.966631 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.966737 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.966751 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.966816 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.966828 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:24Z","lastTransitionTime":"2025-12-05T17:34:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.972693 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/cr
cont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" 
name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:24Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:24 crc kubenswrapper[4961]: I1205 17:34:24.989427 4961 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:24Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.014830 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name
\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fea80df3fb98d4ea7f85ad32d7d2bb20f720d499c9b0e7a75265688e5f71b4b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fea80df3fb98d4ea7f85ad32d7d2bb20f720d499c9b0e7a75265688e5f71b4b7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:34:06Z\\\",\\\"message\\\":\\\" successful for *v1.Pod openshift-dns/node-resolver-pbqp7 after 0 failed attempt(s)\\\\nI1205 17:34:05.786466 6660 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-pbqp7\\\\nI1205 17:34:05.785031 6660 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-qc49n\\\\nI1205 17:34:05.786494 6660 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-qc49n in node crc\\\\nI1205 17:34:05.786505 6660 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-qc49n after 0 failed attempt(s)\\\\nI1205 17:34:05.786512 6660 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-qc49n\\\\nI1205 17:34:05.786502 6660 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 17:34:05.784968 6660 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1205 17:34:05.786525 6660 default_network_controller.go:776] Recording success event on po\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:34:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5w9vd_openshift-ovn-kubernetes(f64daea3-7a90-4012-bd0c-31b137bd1cae)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.031198 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-ar
t-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.048059 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"daf1b82d-41cc-464f-b868-78a2929f63e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bbda92a962aed8a27fbb81050c0587889e326b89bca4fe276dd1754a4ee32e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51ceaa833433903bbf371fffcd7e3d0dcb3d74fe2497134dd43e18570b8e9c77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4573e4acbc0645239c97674ae61b4f2d952fcd5b0929f51b8f952b97b1de1eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://386664d93709083a63520c993930de5794f10edb977c945fafa2093e4e1e451d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://386664d93709083a63520c993930de5794f10edb977c945fafa2093e4e1e451d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.064906 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 
2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.069729 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.069793 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.069806 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.069824 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.069837 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:25Z","lastTransitionTime":"2025-12-05T17:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.080174 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e
95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.094434 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: 
current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.111838 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2f0ebc0d917781b93e3848439cdfeba56b89f9716773c0dbd68ca5aeb866bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f
9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\
\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.129566 4961 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:24Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:34:23Z\\\",\\\"message\\\":\\\"2025-12-05T17:33:38+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_4205d582-be47-42de-92da-c3b1c4e06d40\\\\n2025-12-05T17:33:38+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_4205d582-be47-42de-92da-c3b1c4e06d40 to /host/opt/cni/bin/\\\\n2025-12-05T17:33:38Z [verbose] multus-daemon started\\\\n2025-12-05T17:33:38Z [verbose] Readiness Indicator file check\\\\n2025-12-05T17:34:23Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.141943 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.159361 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.172146 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.172177 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.172204 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.172220 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.172231 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:25Z","lastTransitionTime":"2025-12-05T17:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.175111 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.190232 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e6cede-369d-4288-b388-c28aae76a50b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cd217f59fdbb11c5b8edec0f4f4a4930e22c419cc8a9048443a711641b50445\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://809381123c6003acbaac767394ebee9da31e022a661eaf3e1b601a7192664d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fdb4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 
17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.275758 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.275846 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.275862 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.275887 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.275901 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:25Z","lastTransitionTime":"2025-12-05T17:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.377933 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.377983 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.377998 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.378022 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.378036 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:25Z","lastTransitionTime":"2025-12-05T17:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.480731 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.480817 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.480828 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.480851 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.480866 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:25Z","lastTransitionTime":"2025-12-05T17:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.583665 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.583718 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.583728 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.583762 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.583793 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:25Z","lastTransitionTime":"2025-12-05T17:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.614966 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bgtgs_26618630-1782-4ae8-af12-6f913fbddf5b/kube-multus/0.log" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.615032 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bgtgs" event={"ID":"26618630-1782-4ae8-af12-6f913fbddf5b","Type":"ContainerStarted","Data":"af8e46afa0972933bb17315bc7de0592287240851f5178b7533fd918320062ab"} Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.634834 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2f0ebc0d917781b93e3848439cdfeba56b89f9716773c0dbd68ca5aeb866bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.655482 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af8e46afa0972933bb17315bc7de0592287240851f5178b7533fd918320062ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:34:23Z\\\",\\\"message\\\":\\\"2025-12-05T17:33:38+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_4205d582-be47-42de-92da-c3b1c4e06d40\\\\n2025-12-05T17:33:38+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_4205d582-be47-42de-92da-c3b1c4e06d40 to /host/opt/cni/bin/\\\\n2025-12-05T17:33:38Z [verbose] multus-daemon started\\\\n2025-12-05T17:33:38Z [verbose] Readiness Indicator file check\\\\n2025-12-05T17:34:23Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:34:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.669755 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.685432 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.687349 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.687385 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.687395 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.687411 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.687421 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:25Z","lastTransitionTime":"2025-12-05T17:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.701918 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.716504 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.728299 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e6cede-369d-4288-b388-c28aae76a50b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cd217f59fdbb11c5b8edec0f4f4a4930e22c419cc8a9048443a711641b50445\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://809381123c6003acbaac767394ebee9da31e022a661eaf3e1b601a7192664d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fdb4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 
17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.741068 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.753076 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.766264 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pgc6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f24429b-a57e-47d0-8354-87ff9d6bcee8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pgc6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.780999 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.789633 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.789701 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.789716 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.789733 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.789747 4961 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:25Z","lastTransitionTime":"2025-12-05T17:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.798532 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.814661 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"
2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.828852 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"daf1b82d-41cc-464f-b868-78a2929f63e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bbda92a962aed8a27fbb81050c0587889e326b89bca4fe276dd1754a4ee32e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51ceaa833433903bbf371fffcd7e3d0dcb3d74fe2497134dd43e18570b8e9c77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4573e4acbc0645239c97674ae61b4f2d952fcd5b0929f51b8f952b97b1de1eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://386664d93709083a63520c993930de5794f10edb977c945fafa2093e4e1e451d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://386664d93709083a63520c993930de5794f10edb977c945fafa2093e4e1e451d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.844966 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 
2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.863086 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:34:25 crc kubenswrapper[4961]: E1205 17:34:25.863250 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.863394 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/
serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.890162 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fea80df3fb98d4ea7f85ad32d7d2bb20f720d499
c9b0e7a75265688e5f71b4b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fea80df3fb98d4ea7f85ad32d7d2bb20f720d499c9b0e7a75265688e5f71b4b7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:34:06Z\\\",\\\"message\\\":\\\" successful for *v1.Pod openshift-dns/node-resolver-pbqp7 after 0 failed attempt(s)\\\\nI1205 17:34:05.786466 6660 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-pbqp7\\\\nI1205 17:34:05.785031 6660 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-qc49n\\\\nI1205 17:34:05.786494 6660 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-qc49n in node crc\\\\nI1205 17:34:05.786505 6660 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-qc49n after 0 failed attempt(s)\\\\nI1205 17:34:05.786512 6660 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-qc49n\\\\nI1205 17:34:05.786502 6660 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 17:34:05.784968 6660 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1205 17:34:05.786525 6660 default_network_controller.go:776] Recording success event on po\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:34:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-5w9vd_openshift-ovn-kubernetes(f64daea3-7a90-4012-bd0c-31b137bd1cae)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:25Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.893001 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.893037 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.893051 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.893070 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.893082 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:25Z","lastTransitionTime":"2025-12-05T17:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.995980 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.996028 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.996041 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.996061 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:25 crc kubenswrapper[4961]: I1205 17:34:25.996075 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:25Z","lastTransitionTime":"2025-12-05T17:34:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.098984 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.099024 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.099035 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.099052 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.099064 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:26Z","lastTransitionTime":"2025-12-05T17:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.202095 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.202169 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.202237 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.202265 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.202280 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:26Z","lastTransitionTime":"2025-12-05T17:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.304532 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.304580 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.304593 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.304612 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.304624 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:26Z","lastTransitionTime":"2025-12-05T17:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.407245 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.407679 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.407821 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.407955 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.408055 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:26Z","lastTransitionTime":"2025-12-05T17:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.510726 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.510761 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.510809 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.510836 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.510849 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:26Z","lastTransitionTime":"2025-12-05T17:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.614050 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.614122 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.614142 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.614168 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.614189 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:26Z","lastTransitionTime":"2025-12-05T17:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.716580 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.716622 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.716632 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.716648 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.716661 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:26Z","lastTransitionTime":"2025-12-05T17:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.819803 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.819848 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.819860 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.819881 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.819894 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:26Z","lastTransitionTime":"2025-12-05T17:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.863352 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.863352 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:34:26 crc kubenswrapper[4961]: E1205 17:34:26.863509 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.863592 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:34:26 crc kubenswrapper[4961]: E1205 17:34:26.863672 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:34:26 crc kubenswrapper[4961]: E1205 17:34:26.863734 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.923236 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.923505 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.923566 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.923687 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:26 crc kubenswrapper[4961]: I1205 17:34:26.923795 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:26Z","lastTransitionTime":"2025-12-05T17:34:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.025904 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.025953 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.025964 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.025981 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.025991 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:27Z","lastTransitionTime":"2025-12-05T17:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.129121 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.129151 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.129159 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.129173 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.129182 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:27Z","lastTransitionTime":"2025-12-05T17:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.231937 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.231976 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.231987 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.232006 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.232021 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:27Z","lastTransitionTime":"2025-12-05T17:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.334439 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.334489 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.334499 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.334517 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.334526 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:27Z","lastTransitionTime":"2025-12-05T17:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.436589 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.436633 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.436642 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.436658 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.436670 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:27Z","lastTransitionTime":"2025-12-05T17:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.539685 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.539729 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.539740 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.539758 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.539771 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:27Z","lastTransitionTime":"2025-12-05T17:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.642492 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.642542 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.642554 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.642569 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.642581 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:27Z","lastTransitionTime":"2025-12-05T17:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.745304 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.745354 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.745368 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.745385 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.745397 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:27Z","lastTransitionTime":"2025-12-05T17:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.847897 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.847951 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.847963 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.847988 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.847998 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:27Z","lastTransitionTime":"2025-12-05T17:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.862695 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:34:27 crc kubenswrapper[4961]: E1205 17:34:27.862882 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.950817 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.950883 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.950899 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.950921 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:27 crc kubenswrapper[4961]: I1205 17:34:27.950935 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:27Z","lastTransitionTime":"2025-12-05T17:34:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.053410 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.053441 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.053453 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.053467 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.053476 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:28Z","lastTransitionTime":"2025-12-05T17:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.156682 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.156753 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.156771 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.156833 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.156850 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:28Z","lastTransitionTime":"2025-12-05T17:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.270032 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.270103 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.270123 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.270152 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.270175 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:28Z","lastTransitionTime":"2025-12-05T17:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.373330 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.373406 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.373430 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.373458 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.373481 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:28Z","lastTransitionTime":"2025-12-05T17:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.476405 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.476449 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.476461 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.476482 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.476496 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:28Z","lastTransitionTime":"2025-12-05T17:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.579462 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.579508 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.579520 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.579537 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.579552 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:28Z","lastTransitionTime":"2025-12-05T17:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.682841 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.682886 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.682897 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.682912 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.682923 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:28Z","lastTransitionTime":"2025-12-05T17:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.785545 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.785590 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.785601 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.785619 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.785631 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:28Z","lastTransitionTime":"2025-12-05T17:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.862666 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.862690 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.862871 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:28 crc kubenswrapper[4961]: E1205 17:34:28.862927 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:34:28 crc kubenswrapper[4961]: E1205 17:34:28.863076 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:34:28 crc kubenswrapper[4961]: E1205 17:34:28.863153 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.888417 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.888481 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.888498 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.888520 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.888533 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:28Z","lastTransitionTime":"2025-12-05T17:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.991217 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.991269 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.991280 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.991299 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:28 crc kubenswrapper[4961]: I1205 17:34:28.991310 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:28Z","lastTransitionTime":"2025-12-05T17:34:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.094302 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.094333 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.094343 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.094358 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.094370 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:29Z","lastTransitionTime":"2025-12-05T17:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.196734 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.196805 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.196819 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.196835 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.196848 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:29Z","lastTransitionTime":"2025-12-05T17:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.298875 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.298940 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.298956 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.298979 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.298995 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:29Z","lastTransitionTime":"2025-12-05T17:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.401615 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.401720 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.401750 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.401830 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.401862 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:29Z","lastTransitionTime":"2025-12-05T17:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.504908 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.504959 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.504972 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.504991 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.505003 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:29Z","lastTransitionTime":"2025-12-05T17:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.607633 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.607678 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.607690 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.607711 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.607723 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:29Z","lastTransitionTime":"2025-12-05T17:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.710082 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.710134 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.710146 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.710166 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.710177 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:29Z","lastTransitionTime":"2025-12-05T17:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.813677 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.813721 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.813732 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.813753 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.813767 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:29Z","lastTransitionTime":"2025-12-05T17:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.862905 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:34:29 crc kubenswrapper[4961]: E1205 17:34:29.863091 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.917170 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.917250 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.917270 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.917291 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:29 crc kubenswrapper[4961]: I1205 17:34:29.917303 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:29Z","lastTransitionTime":"2025-12-05T17:34:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.021042 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.021109 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.021130 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.021155 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.021173 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:30Z","lastTransitionTime":"2025-12-05T17:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.124608 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.125054 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.125273 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.125504 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.125684 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:30Z","lastTransitionTime":"2025-12-05T17:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.229104 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.229543 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.229690 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.229912 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.230085 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:30Z","lastTransitionTime":"2025-12-05T17:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.333213 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.333297 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.333322 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.333354 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.333378 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:30Z","lastTransitionTime":"2025-12-05T17:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.436578 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.436652 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.436672 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.436694 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.436707 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:30Z","lastTransitionTime":"2025-12-05T17:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.540175 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.540233 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.540246 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.540267 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.540280 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:30Z","lastTransitionTime":"2025-12-05T17:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.642712 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.642762 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.642792 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.642809 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.642820 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:30Z","lastTransitionTime":"2025-12-05T17:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.746259 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.746337 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.746358 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.746385 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.746403 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:30Z","lastTransitionTime":"2025-12-05T17:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.849665 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.849740 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.849764 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.849822 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.849843 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:30Z","lastTransitionTime":"2025-12-05T17:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.863393 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.863425 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.863609 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:30 crc kubenswrapper[4961]: E1205 17:34:30.864067 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:34:30 crc kubenswrapper[4961]: E1205 17:34:30.864192 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:34:30 crc kubenswrapper[4961]: E1205 17:34:30.863926 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.952554 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.952606 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.952616 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.952634 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:30 crc kubenswrapper[4961]: I1205 17:34:30.952645 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:30Z","lastTransitionTime":"2025-12-05T17:34:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.056943 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.057011 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.057034 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.057065 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.057086 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:31Z","lastTransitionTime":"2025-12-05T17:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.160244 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.160327 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.160343 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.160370 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.160390 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:31Z","lastTransitionTime":"2025-12-05T17:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.263445 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.263483 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.263494 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.263511 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.263523 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:31Z","lastTransitionTime":"2025-12-05T17:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.366373 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.366450 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.366461 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.366480 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.366492 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:31Z","lastTransitionTime":"2025-12-05T17:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.469145 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.469184 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.469199 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.469220 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.469234 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:31Z","lastTransitionTime":"2025-12-05T17:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.571703 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.571813 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.571855 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.571891 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.571923 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:31Z","lastTransitionTime":"2025-12-05T17:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.680137 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.680253 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.680282 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.680319 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.680344 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:31Z","lastTransitionTime":"2025-12-05T17:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.783956 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.784017 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.784035 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.784106 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.784129 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:31Z","lastTransitionTime":"2025-12-05T17:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.863327 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:34:31 crc kubenswrapper[4961]: E1205 17:34:31.863506 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.886304 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.886329 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.886338 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.886352 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.886364 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:31Z","lastTransitionTime":"2025-12-05T17:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.988012 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.988044 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.988053 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.988068 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:31 crc kubenswrapper[4961]: I1205 17:34:31.988078 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:31Z","lastTransitionTime":"2025-12-05T17:34:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.091357 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.091414 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.091429 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.091452 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.091467 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:32Z","lastTransitionTime":"2025-12-05T17:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.194563 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.194625 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.194641 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.194663 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.194678 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:32Z","lastTransitionTime":"2025-12-05T17:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.297750 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.297827 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.297839 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.297865 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.297879 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:32Z","lastTransitionTime":"2025-12-05T17:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.401143 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.401210 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.401228 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.401258 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.401277 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:32Z","lastTransitionTime":"2025-12-05T17:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.506197 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.506286 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.506301 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.506325 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.506338 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:32Z","lastTransitionTime":"2025-12-05T17:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.531743 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.532072 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.532121 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.532146 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.532161 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:32Z","lastTransitionTime":"2025-12-05T17:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:32 crc kubenswrapper[4961]: E1205 17:34:32.547583 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:32Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.552289 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.552373 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.552386 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.552404 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.552416 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:32Z","lastTransitionTime":"2025-12-05T17:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:32 crc kubenswrapper[4961]: E1205 17:34:32.567397 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:32Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.571347 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.571384 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.571411 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.571427 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.571437 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:32Z","lastTransitionTime":"2025-12-05T17:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:32 crc kubenswrapper[4961]: E1205 17:34:32.586527 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:32Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.591377 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.591421 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
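
Every retry above fails the same way: the node-status patch is rejected before it reaches storage because the node.network-node-identity.openshift.io validating webhook at https://127.0.0.1:9743 presents a serving certificate that expired on 2025-08-24, while the node clock reads 2025-12-05 (typical of a CRC image resumed long after its certificates were minted). A minimal Go probe along the lines of the sketch below (illustrative diagnostic, assumed to run on the node itself; only the endpoint comes from the log) would confirm the certificate's validity window independently of the kubelet:

package main

import (
	"crypto/tls"
	"fmt"
	"time"
)

func main() {
	// Endpoint taken from the webhook error in the kubelet log above.
	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{
		InsecureSkipVerify: true, // we want to inspect the cert, not trust it
	})
	if err != nil {
		fmt.Println("dial:", err)
		return
	}
	defer conn.Close()
	cert := conn.ConnectionState().PeerCertificates[0]
	now := time.Now().UTC()
	fmt.Printf("subject=%s\nnotBefore=%s\nnotAfter=%s\nexpired=%v\n",
		cert.Subject,
		cert.NotBefore.Format(time.RFC3339),
		cert.NotAfter.Format(time.RFC3339),
		now.After(cert.NotAfter))
}

The same window can be read with openssl s_client against the port; either way, a notAfter of 2025-08-24T17:21:41Z matches the x509 error in every retry.
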
event="NodeHasNoDiskPressure" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.591443 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.591467 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.591481 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:32Z","lastTransitionTime":"2025-12-05T17:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:32 crc kubenswrapper[4961]: E1205 17:34:32.604719 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:32Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.609571 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.609630 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
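
The recurring Ready=False condition is driven by the container runtime's network check: the runtime reports NetworkReady=false because no CNI configuration file exists in /etc/kubernetes/cni/net.d/, and on a resumed CRC instance the network operator that would write it cannot come up while the node is NotReady and the identity webhook's certificates are expired. A simplified sketch of that directory check (assuming the standard CNI config extensions .conf/.conflist/.json; not the runtime's actual loader code):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	confDir := "/etc/kubernetes/cni/net.d" // directory named in the log
	entries, err := os.ReadDir(confDir)
	if err != nil {
		fmt.Println("read dir:", err)
		return
	}
	found := false
	for _, e := range entries {
		// CNI config loaders accept these extensions.
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			fmt.Println("found CNI config:", e.Name())
			found = true
		}
	}
	if !found {
		fmt.Println("no CNI configuration file; runtime reports NetworkReady=false")
	}
}

An empty directory here reproduces exactly the NetworkPluginNotReady message the kubelet keeps relaying.
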
event="NodeHasNoDiskPressure" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.609646 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.609666 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.609682 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:32Z","lastTransitionTime":"2025-12-05T17:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:32 crc kubenswrapper[4961]: E1205 17:34:32.627941 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:32Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:32 crc kubenswrapper[4961]: E1205 17:34:32.628096 4961 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.630944 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
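
The E1205 17:34:32.628096 entry above closes one sync cycle: the kubelet caps status-patch attempts per cycle (nodeStatusUpdateRetry, historically 5 in kubelet_node_status.go; treat the constant as illustrative), logs "update node status exceeds retry count", and tries again on the next sync, which is why the same sequence repeats throughout this log. A schematic of the pattern, not kubelet source:

package main

import (
	"errors"
	"fmt"
)

// Illustrative stand-in for the kubelet's per-cycle retry budget.
const nodeStatusUpdateRetry = 5

// updateNodeStatus retries tryUpdate until it succeeds or the budget runs
// out, matching the "will retry" / "exceeds retry count" pairing in the log.
func updateNodeStatus(tryUpdate func() error) error {
	for i := 0; i < nodeStatusUpdateRetry; i++ {
		if err := tryUpdate(); err != nil {
			fmt.Println("Error updating node status, will retry:", err)
			continue
		}
		return nil
	}
	return errors.New("update node status exceeds retry count")
}

func main() {
	webhookErr := errors.New("failed calling webhook: x509: certificate has expired")
	if err := updateNodeStatus(func() error { return webhookErr }); err != nil {
		fmt.Println("Unable to update node status:", err)
	}
}

Since the webhook rejects every attempt for the same reason, exhausting the budget (rather than any transient recovery) is the expected outcome here.
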
event="NodeHasSufficientMemory" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.630979 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.630993 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.631010 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.631021 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:32Z","lastTransitionTime":"2025-12-05T17:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.733316 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.733379 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.733388 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.733405 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.733420 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:32Z","lastTransitionTime":"2025-12-05T17:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.837004 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.837063 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.837081 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.837105 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.837123 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:32Z","lastTransitionTime":"2025-12-05T17:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.863454 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.863455 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.863738 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:34:32 crc kubenswrapper[4961]: E1205 17:34:32.863639 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:34:32 crc kubenswrapper[4961]: E1205 17:34:32.863852 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:34:32 crc kubenswrapper[4961]: E1205 17:34:32.863969 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.940518 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.940569 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.940584 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.940609 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:32 crc kubenswrapper[4961]: I1205 17:34:32.940625 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:32Z","lastTransitionTime":"2025-12-05T17:34:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.043445 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.043499 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.043511 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.043535 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.043546 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:33Z","lastTransitionTime":"2025-12-05T17:34:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.146765 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.146828 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.146841 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.146861 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.146877 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:33Z","lastTransitionTime":"2025-12-05T17:34:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.249634 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.249692 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.249709 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.249732 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.249748 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:33Z","lastTransitionTime":"2025-12-05T17:34:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.352747 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.352813 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.352827 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.352842 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.352852 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:33Z","lastTransitionTime":"2025-12-05T17:34:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.458874 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.458913 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.458937 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.458958 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.458974 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:33Z","lastTransitionTime":"2025-12-05T17:34:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.561960 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.561995 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.562004 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.562033 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.562041 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:33Z","lastTransitionTime":"2025-12-05T17:34:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.664671 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.664760 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.664826 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.664858 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.664884 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:33Z","lastTransitionTime":"2025-12-05T17:34:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.767987 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.768069 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.768095 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.768126 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.768150 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:33Z","lastTransitionTime":"2025-12-05T17:34:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.863313 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:34:33 crc kubenswrapper[4961]: E1205 17:34:33.863502 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.872081 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.872165 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.872185 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.872212 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.872231 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:33Z","lastTransitionTime":"2025-12-05T17:34:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.974728 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.974768 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.974793 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.974807 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:33 crc kubenswrapper[4961]: I1205 17:34:33.974818 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:33Z","lastTransitionTime":"2025-12-05T17:34:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.089648 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.089719 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.089749 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.090189 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.090284 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:34Z","lastTransitionTime":"2025-12-05T17:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.192913 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.192967 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.192983 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.193003 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.193018 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:34Z","lastTransitionTime":"2025-12-05T17:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.295194 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.295250 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.295263 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.295282 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.295294 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:34Z","lastTransitionTime":"2025-12-05T17:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.397974 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.398026 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.398039 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.398060 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.398072 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:34Z","lastTransitionTime":"2025-12-05T17:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.500014 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.500064 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.500076 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.500094 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.500106 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:34Z","lastTransitionTime":"2025-12-05T17:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.602269 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.602326 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.602339 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.602361 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.602375 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:34Z","lastTransitionTime":"2025-12-05T17:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.704671 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.704707 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.704719 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.704734 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.704746 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:34Z","lastTransitionTime":"2025-12-05T17:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.806892 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.806959 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.806968 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.806984 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.806993 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:34Z","lastTransitionTime":"2025-12-05T17:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.862954 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.863051 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:34:34 crc kubenswrapper[4961]: E1205 17:34:34.863148 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.863212 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:34:34 crc kubenswrapper[4961]: E1205 17:34:34.863306 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:34:34 crc kubenswrapper[4961]: E1205 17:34:34.863547 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.885705 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restart
Count\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for 
client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:34Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.901108 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:34Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.909113 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.909163 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.909176 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.909195 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.909207 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:34Z","lastTransitionTime":"2025-12-05T17:34:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.921299 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fea80df3fb98d4ea7f85ad32d7d2bb20f720d499c9b0e7a75265688e5f71b4b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fea80df3fb98d4ea7f85ad32d7d2bb20f720d499c9b0e7a75265688e5f71b4b7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:34:06Z\\\",\\\"message\\\":\\\" successful for *v1.Pod openshift-dns/node-resolver-pbqp7 after 0 failed attempt(s)\\\\nI1205 17:34:05.786466 6660 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-pbqp7\\\\nI1205 17:34:05.785031 6660 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-qc49n\\\\nI1205 17:34:05.786494 6660 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-qc49n in node crc\\\\nI1205 17:34:05.786505 6660 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-qc49n after 0 failed attempt(s)\\\\nI1205 17:34:05.786512 6660 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-qc49n\\\\nI1205 17:34:05.786502 6660 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 17:34:05.784968 6660 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1205 17:34:05.786525 6660 default_network_controller.go:776] Recording success event on po\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:34:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed 
container=ovnkube-controller pod=ovnkube-node-5w9vd_openshift-ovn-kubernetes(f64daea3-7a90-4012-bd0c-31b137bd1cae)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\
\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:34Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.937801 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:34Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.952516 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"daf1b82d-41cc-464f-b868-78a2929f63e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bbda92a962aed8a27fbb81050c0587889e326b89bca4fe276dd1754a4ee32e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51ceaa833433903bbf371fffcd7e3d0dcb3d74fe2497134dd43e18570b8e9c77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4573e4acbc0645239c97674ae61b4f2d952fcd5b0929f51b8f952b97b1de1eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://386664d93709083a63520c993930de5794f10edb977c945fafa2093e4e1e451d\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://386664d93709083a63520c993930de5794f10edb977c945fafa2093e4e1e451d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:34Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.967765 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:34Z is after 
2025-08-24T17:21:41Z" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.981079 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed 
to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:34Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:34 crc kubenswrapper[4961]: I1205 17:34:34.994333 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:34Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.011520 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.011567 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.011577 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.011593 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.011609 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:35Z","lastTransitionTime":"2025-12-05T17:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in 
/etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.012148 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2f0ebc0d917781b93e3848439cdfeba56b89f9716773c0dbd68ca5aeb866bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f
9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\
\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:35Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.026412 4961 status_manager.go:875] "Failed to update 
status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af8e46afa0972933bb17315bc7de0592287240851f5178b7533fd918320062ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:34:23Z\\\",\\\"message\\\":\\\"2025-12-05T17:33:38+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_4205d582-be47-42de-92da-c3b1c4e06d40\\\\n2025-12-05T17:33:38+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_4205d582-be47-42de-92da-c3b1c4e06d40 to /host/opt/cni/bin/\\\\n2025-12-05T17:33:38Z [verbose] multus-daemon started\\\\n2025-12-05T17:33:38Z [verbose] Readiness Indicator file check\\\\n2025-12-05T17:34:23Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:34:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:35Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.038258 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:35Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.054123 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:35Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.079971 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:35Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.102388 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e6cede-369d-4288-b388-c28aae76a50b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cd217f59fdbb11c5b8edec0f4f4a4930e22c419cc8a9048443a711641b50445\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://809381123c6003acbaac767394ebee9da31e022a661eaf3e1b601a7192664d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af
0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fdb4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:35Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.114379 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.114434 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.114446 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.114465 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.114479 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:35Z","lastTransitionTime":"2025-12-05T17:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.128029 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:35Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.146955 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:35Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.162264 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pgc6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f24429b-a57e-47d0-8354-87ff9d6bcee8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pgc6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:35Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.216880 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.217184 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.217277 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.217345 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.217437 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:35Z","lastTransitionTime":"2025-12-05T17:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.320637 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.320687 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.320710 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.320730 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.320743 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:35Z","lastTransitionTime":"2025-12-05T17:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.423367 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.423420 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.423429 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.423444 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.423453 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:35Z","lastTransitionTime":"2025-12-05T17:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.526870 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.526916 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.526938 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.526963 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.526978 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:35Z","lastTransitionTime":"2025-12-05T17:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.629363 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.629437 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.629449 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.629470 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.629483 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:35Z","lastTransitionTime":"2025-12-05T17:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.732088 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.732144 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.732154 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.732174 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.732185 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:35Z","lastTransitionTime":"2025-12-05T17:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.834804 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.834862 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.834873 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.834895 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.834908 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:35Z","lastTransitionTime":"2025-12-05T17:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.862696 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:34:35 crc kubenswrapper[4961]: E1205 17:34:35.862896 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.863856 4961 scope.go:117] "RemoveContainer" containerID="fea80df3fb98d4ea7f85ad32d7d2bb20f720d499c9b0e7a75265688e5f71b4b7" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.937068 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.937113 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.937124 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.937140 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:35 crc kubenswrapper[4961]: I1205 17:34:35.937154 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:35Z","lastTransitionTime":"2025-12-05T17:34:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.041010 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.041370 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.041383 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.041403 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.041418 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:36Z","lastTransitionTime":"2025-12-05T17:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.144310 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.144370 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.144393 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.144416 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.144431 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:36Z","lastTransitionTime":"2025-12-05T17:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.248163 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.248208 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.248219 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.248236 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.248250 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:36Z","lastTransitionTime":"2025-12-05T17:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.351325 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.351378 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.351391 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.351411 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.351425 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:36Z","lastTransitionTime":"2025-12-05T17:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.454281 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.454352 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.454365 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.454385 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.454395 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:36Z","lastTransitionTime":"2025-12-05T17:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.557343 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.557384 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.557395 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.557411 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.557425 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:36Z","lastTransitionTime":"2025-12-05T17:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.660148 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.660213 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.660227 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.660250 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.660264 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:36Z","lastTransitionTime":"2025-12-05T17:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.763381 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.763537 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.763552 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.763588 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.763601 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:36Z","lastTransitionTime":"2025-12-05T17:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.862832 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.862844 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.862858 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:34:36 crc kubenswrapper[4961]: E1205 17:34:36.863476 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:34:36 crc kubenswrapper[4961]: E1205 17:34:36.867003 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:34:36 crc kubenswrapper[4961]: E1205 17:34:36.867272 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.870413 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.870644 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.871214 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.872402 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.872594 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:36Z","lastTransitionTime":"2025-12-05T17:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.987743 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.987811 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.987822 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.987843 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:36 crc kubenswrapper[4961]: I1205 17:34:36.987855 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:36Z","lastTransitionTime":"2025-12-05T17:34:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.090182 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.090210 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.090218 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.090233 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.090243 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:37Z","lastTransitionTime":"2025-12-05T17:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.193101 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.193132 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.193142 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.193155 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.193165 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:37Z","lastTransitionTime":"2025-12-05T17:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.295496 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.295527 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.295535 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.295550 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.295559 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:37Z","lastTransitionTime":"2025-12-05T17:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.398007 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.398051 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.398064 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.398083 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.398096 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:37Z","lastTransitionTime":"2025-12-05T17:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.501813 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.501863 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.501874 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.501893 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.501906 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:37Z","lastTransitionTime":"2025-12-05T17:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.605029 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.605083 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.605094 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.605111 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.605123 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:37Z","lastTransitionTime":"2025-12-05T17:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.654276 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5w9vd_f64daea3-7a90-4012-bd0c-31b137bd1cae/ovnkube-controller/2.log" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.657130 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerStarted","Data":"8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02"} Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.657679 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.672672 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"daf1b82d-41cc-464f-b868-78a2929f63e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bbda92a962aed8a27fbb81050c0587889e326b89bca4fe276dd1754a4ee32e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51ceaa833433903bbf371fffcd7e3d0dcb3d74fe2497134dd43e18570b8e9c77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4573e4acbc0645239c97674ae61b4f2d952f
cd5b0929f51b8f952b97b1de1eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://386664d93709083a63520c993930de5794f10edb977c945fafa2093e4e1e451d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://386664d93709083a63520c993930de5794f10edb977c945fafa2093e4e1e451d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:37Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.689258 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:37Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.704024 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:37Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.708000 4961 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.708053 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.708067 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.708085 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.708373 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:37Z","lastTransitionTime":"2025-12-05T17:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.725044 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fea482733ff2ed68d4f7cc98c24060e76cf1916
1448d74b37d39888ee79ce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fea80df3fb98d4ea7f85ad32d7d2bb20f720d499c9b0e7a75265688e5f71b4b7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:34:06Z\\\",\\\"message\\\":\\\" successful for *v1.Pod openshift-dns/node-resolver-pbqp7 after 0 failed attempt(s)\\\\nI1205 17:34:05.786466 6660 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-pbqp7\\\\nI1205 17:34:05.785031 6660 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-qc49n\\\\nI1205 17:34:05.786494 6660 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-qc49n in node crc\\\\nI1205 17:34:05.786505 6660 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-qc49n after 0 failed attempt(s)\\\\nI1205 17:34:05.786512 6660 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-qc49n\\\\nI1205 17:34:05.786502 6660 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 17:34:05.784968 6660 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1205 17:34:05.786525 6660 default_network_controller.go:776] Recording success event on 
po\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:34:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:37Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.742998 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:37Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.757702 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:37Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.773831 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"m
ountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:37Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.790185 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:37Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.811370 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.811415 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.811454 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.811478 
4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.811489 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:37Z","lastTransitionTime":"2025-12-05T17:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.812945 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2f0ebc0d917781b93e3848439cdfeba56b89f9716773c0dbd68ca5aeb866bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPa
th\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5e
d81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"sta
rtTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:37Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.833187 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af8e46afa0972933bb17315bc7de0592287240851f5178b7533fd918320062ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:34:23Z\\\",\\\"message\\\":\\\"2025-12-05T17:33:38+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_4205d582-be47-42de-92da-c3b1c4e06d40\\\\n2025-12-05T17:33:38+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_4205d582-be47-42de-92da-c3b1c4e06d40 to /host/opt/cni/bin/\\\\n2025-12-05T17:33:38Z [verbose] multus-daemon started\\\\n2025-12-05T17:33:38Z [verbose] Readiness Indicator file check\\\\n2025-12-05T17:34:23Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:34:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:37Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.849092 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:37Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.863311 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:34:37 crc kubenswrapper[4961]: E1205 17:34:37.863452 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.864859 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e6cede-369d-4288-b388-c28aae76a50b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cd217f59fdbb11c5b8edec0f4f4a4930e22c419cc8a9048443a711641b50445\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://809381123c6003acbaac767394ebee9da31e022a661eaf3e1b601a7192664d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fdb4n\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:37Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.881724 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:37Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.892602 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pgc6p" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f24429b-a57e-47d0-8354-87ff9d6bcee8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pgc6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:37Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.911770 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:37Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.914138 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.914176 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.914189 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.914208 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.914223 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:37Z","lastTransitionTime":"2025-12-05T17:34:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.931188 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:37Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:37 crc kubenswrapper[4961]: I1205 17:34:37.949572 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:37Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.017234 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.017279 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.017294 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.017314 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.017330 4961 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:38Z","lastTransitionTime":"2025-12-05T17:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.120345 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.120408 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.120420 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.120441 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.120461 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:38Z","lastTransitionTime":"2025-12-05T17:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.223149 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.223191 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.223203 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.223222 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.223234 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:38Z","lastTransitionTime":"2025-12-05T17:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.326037 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.326085 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.326099 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.326144 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.326156 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:38Z","lastTransitionTime":"2025-12-05T17:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.428754 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.428842 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.428858 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.428883 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.428898 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:38Z","lastTransitionTime":"2025-12-05T17:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.532395 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.532445 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.532457 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.532476 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.532490 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:38Z","lastTransitionTime":"2025-12-05T17:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.634694 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.634758 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.634827 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.634854 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.634870 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:38Z","lastTransitionTime":"2025-12-05T17:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.663523 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5w9vd_f64daea3-7a90-4012-bd0c-31b137bd1cae/ovnkube-controller/3.log" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.664320 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5w9vd_f64daea3-7a90-4012-bd0c-31b137bd1cae/ovnkube-controller/2.log" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.667411 4961 generic.go:334] "Generic (PLEG): container finished" podID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerID="8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02" exitCode=1 Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.667461 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerDied","Data":"8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02"} Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.667713 4961 scope.go:117] "RemoveContainer" containerID="fea80df3fb98d4ea7f85ad32d7d2bb20f720d499c9b0e7a75265688e5f71b4b7" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.668353 4961 scope.go:117] "RemoveContainer" containerID="8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02" Dec 05 17:34:38 crc kubenswrapper[4961]: E1205 17:34:38.668546 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5w9vd_openshift-ovn-kubernetes(f64daea3-7a90-4012-bd0c-31b137bd1cae)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.687964 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af8e46afa0972933bb17315bc7de0592287240851f5178b7533fd918320062ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:34:23Z\\\",\\\"message\\\":\\\"2025-12-05T17:33:38+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_4205d582-be47-42de-92da-c3b1c4e06d40\\\\n2025-12-05T17:33:38+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_4205d582-be47-42de-92da-c3b1c4e06d40 to /host/opt/cni/bin/\\\\n2025-12-05T17:33:38Z [verbose] multus-daemon started\\\\n2025-12-05T17:33:38Z [verbose] Readiness Indicator file check\\\\n2025-12-05T17:34:23Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:34:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.699619 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.713873 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.728805 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.737607 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.737635 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.737645 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.737658 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.737668 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:38Z","lastTransitionTime":"2025-12-05T17:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.742466 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.744905 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.744942 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:38 crc kubenswrapper[4961]: E1205 17:34:38.745052 4961 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:34:38 crc kubenswrapper[4961]: E1205 17:34:38.745102 4961 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:42.745090287 +0000 UTC m=+148.806240760 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Dec 05 17:34:38 crc kubenswrapper[4961]: E1205 17:34:38.745098 4961 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:34:38 crc kubenswrapper[4961]: E1205 17:34:38.745181 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:42.74515427 +0000 UTC m=+148.806304803 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.758563 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2f0ebc0d917781b93e3848439cdfeba56b89f9716773c0dbd68ca5aeb866bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.771810 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e6cede-369d-4288-b388-c28aae76a50b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cd217f59fdbb11c5b8edec0f4f4a4930e22c419cc8a9048443a711641b50445\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://809381123c6003acbaac767394ebee9da31e022a661eaf3e1b601a7192664d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fdb4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:38Z is after 2025-08-24T17:21:41Z" Dec 05 
17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.784311 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.794063 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.804589 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pgc6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f24429b-a57e-47d0-8354-87ff9d6bcee8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pgc6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.818994 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.833301 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.839898 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.840080 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.840162 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.840243 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.840303 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:38Z","lastTransitionTime":"2025-12-05T17:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.846259 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.846396 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:34:38 crc kubenswrapper[4961]: E1205 17:34:38.846458 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:42.846432137 +0000 UTC m=+148.907582670 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:34:38 crc kubenswrapper[4961]: E1205 17:34:38.846531 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:34:38 crc kubenswrapper[4961]: E1205 17:34:38.846548 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:34:38 crc kubenswrapper[4961]: E1205 17:34:38.846559 4961 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:34:38 crc kubenswrapper[4961]: E1205 17:34:38.846599 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:42.846586281 +0000 UTC m=+148.907736754 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.846635 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:34:38 crc kubenswrapper[4961]: E1205 17:34:38.846995 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Dec 05 17:34:38 crc kubenswrapper[4961]: E1205 17:34:38.847026 4961 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Dec 05 17:34:38 crc kubenswrapper[4961]: E1205 17:34:38.847045 4961 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:34:38 crc kubenswrapper[4961]: E1205 17:34:38.847082 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:42.847071311 +0000 UTC m=+148.908221894 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.849323 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287fa
af92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.862555 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:38 crc kubenswrapper[4961]: E1205 17:34:38.862696 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.862570 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.862555 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:34:38 crc kubenswrapper[4961]: E1205 17:34:38.862765 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:34:38 crc kubenswrapper[4961]: E1205 17:34:38.862933 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.863174 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"daf1b82d-41cc-464f-b868-78a2929f63e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bbda92a962aed8a27fbb81050c0587889e326b89bca4fe276dd1754a4ee32e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51ceaa833433903bbf371fffcd7e3d0dcb3d74fe2497134dd43e18570b8e9c77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4573e4acbc0645239c97674ae61b4f2d952fcd5b0929f51b8f952b97b1de1eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://386664d93709083a63520c993930de5794f10edb977c945fafa2093e4e1e451d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://386664d93709083a63520c993930de5794f10edb977c945fafa2093e4e1e451d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.881024 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.893286 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.910836 4961 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0
-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\
\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://fea80df3fb98d4ea7f85ad32d7d2bb20f720d499c9b0e7a75265688e5f71b4b7\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:34:06Z\\\",\\\"message\\\":\\\" successful for *v1.Pod openshift-dns/node-resolver-pbqp7 after 0 failed attempt(s)\\\\nI1205 17:34:05.786466 6660 default_network_controller.go:776] Recording success event on pod openshift-dns/node-resolver-pbqp7\\\\nI1205 17:34:05.785031 6660 obj_retry.go:365] Adding new object: *v1.Pod openshift-image-registry/node-ca-qc49n\\\\nI1205 17:34:05.786494 6660 ovn.go:134] Ensuring zone local for Pod openshift-image-registry/node-ca-qc49n in node crc\\\\nI1205 17:34:05.786505 6660 obj_retry.go:386] Retry successful for *v1.Pod openshift-image-registry/node-ca-qc49n after 0 failed attempt(s)\\\\nI1205 17:34:05.786512 6660 default_network_controller.go:776] Recording success event on pod openshift-image-registry/node-ca-qc49n\\\\nI1205 17:34:05.786502 6660 model_client.go:398] Mutate operations generated as: [{Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574b26}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI1205 17:34:05.784968 6660 obj_retry.go:386] Retry successful for *v1.Pod openshift-network-operator/iptables-alerter-4ln5h after 0 failed attempt(s)\\\\nI1205 17:34:05.786525 6660 default_network_controller.go:776] Recording success event on po\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:34:05Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:34:38Z\\\",\\\"message\\\":\\\"n\\\\nI1205 
17:34:37.857440 7012 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}\\\\nI1205 17:34:37.856263 7012 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1205 17:34:37.857362 7012 transact.go:42] Configuring OVN: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:04 10.217.0.4]} options:{GoMap:map[iface-id-ver:3b6479f0-333b-4a96-9adf-2099afdc2447 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:04 10.217.0.4]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61897e97-c771-4738-8709-09636387cb00}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c02bd945-d57b-49ff-9cd3-202ed3574\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:34:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"ku
be-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:38Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.943004 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.943066 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.943099 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 
17:34:38.943125 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:38 crc kubenswrapper[4961]: I1205 17:34:38.943142 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:38Z","lastTransitionTime":"2025-12-05T17:34:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.045390 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.045435 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.045448 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.045469 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.045484 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:39Z","lastTransitionTime":"2025-12-05T17:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.148208 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.148265 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.148279 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.148304 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.148318 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:39Z","lastTransitionTime":"2025-12-05T17:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.250541 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.250607 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.250620 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.250639 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.250650 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:39Z","lastTransitionTime":"2025-12-05T17:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.354021 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.354070 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.354083 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.354100 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.354114 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:39Z","lastTransitionTime":"2025-12-05T17:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.456159 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.456221 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.456232 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.456247 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.456257 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:39Z","lastTransitionTime":"2025-12-05T17:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.559222 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.559267 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.559277 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.559293 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.559304 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:39Z","lastTransitionTime":"2025-12-05T17:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.661895 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.661943 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.661955 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.661970 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.661981 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:39Z","lastTransitionTime":"2025-12-05T17:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.672913 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5w9vd_f64daea3-7a90-4012-bd0c-31b137bd1cae/ovnkube-controller/3.log" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.676599 4961 scope.go:117] "RemoveContainer" containerID="8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02" Dec 05 17:34:39 crc kubenswrapper[4961]: E1205 17:34:39.676791 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5w9vd_openshift-ovn-kubernetes(f64daea3-7a90-4012-bd0c-31b137bd1cae)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.694541 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\
\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for 
client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.707620 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.721046 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.735161 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"daf1b82d-41cc-464f-b868-78a2929f63e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bbda92a962aed8a27fbb81050c0587889e326b89bca4fe276dd1754a4ee32e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51ceaa833433903bbf371fffcd7e3d0dcb3d74fe2497134dd43e18570b8e9c77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4573e4acbc0645239c97674ae61b4f2d952fcd5b0929f51b8f952b97b1de1eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://386664d93709083a63520c993930de5794f10edb977c945fafa2093e4e1e451d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://386664d93709083a63520c993930de5794f10edb977c945fafa2093e4e1e451d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.757919 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.764892 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.764946 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.764959 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.764976 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.764987 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:39Z","lastTransitionTime":"2025-12-05T17:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.774105 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.798490 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:34:38Z\\\",\\\"message\\\":\\\"n\\\\nI1205 17:34:37.857440 7012 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}\\\\nI1205 17:34:37.856263 7012 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1205 17:34:37.857362 7012 transact.go:42] Configuring OVN: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:04 10.217.0.4]} options:{GoMap:map[iface-id-ver:3b6479f0-333b-4a96-9adf-2099afdc2447 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:04 10.217.0.4]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61897e97-c771-4738-8709-09636387cb00}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == 
{c02bd945-d57b-49ff-9cd3-202ed3574\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:34:36Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5w9vd_openshift-ovn-kubernetes(f64daea3-7a90-4012-bd0c-31b137bd1cae)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"
readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.815278 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-bgtgs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af8e46afa0972933bb17315bc7de0592287240851f5178b7533fd918320062ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:34:23Z\\\",\\\"message\\\":\\\"2025-12-05T17:33:38+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_4205d582-be47-42de-92da-c3b1c4e06d40\\\\n2025-12-05T17:33:38+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_4205d582-be47-42de-92da-c3b1c4e06d40 to /host/opt/cni/bin/\\\\n2025-12-05T17:33:38Z [verbose] multus-daemon started\\\\n2025-12-05T17:33:38Z [verbose] Readiness Indicator file check\\\\n2025-12-05T17:34:23Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:34:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.830520 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.843869 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.857448 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imag
eID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.862971 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:34:39 crc kubenswrapper[4961]: E1205 17:34:39.863092 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.867157 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.867206 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.867218 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.867235 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.867247 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:39Z","lastTransitionTime":"2025-12-05T17:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.873315 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.891186 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2f0ebc0d917781b93e3848439cdfeba56b89f9716773c0dbd68ca5aeb866bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.908120 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e6cede-369d-4288-b388-c28aae76a50b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cd217f59fdbb11c5b8edec0f4f4a4930e22c419cc8a9048443a711641b50445\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://809381123c6003acbaac767394ebee9da31e022a661eaf3e1b601a7192664d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fdb4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:39Z is after 2025-08-24T17:21:41Z" Dec 05 
17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.926796 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.943788 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.957724 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pgc6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f24429b-a57e-47d0-8354-87ff9d6bcee8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pgc6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:39Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.970160 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.970203 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.970216 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.970238 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:39 crc kubenswrapper[4961]: I1205 17:34:39.970248 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:39Z","lastTransitionTime":"2025-12-05T17:34:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.072589 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.072640 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.072650 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.072667 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.072680 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:40Z","lastTransitionTime":"2025-12-05T17:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.174995 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.175060 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.175075 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.175103 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.175116 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:40Z","lastTransitionTime":"2025-12-05T17:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.277945 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.277993 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.278008 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.278028 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.278040 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:40Z","lastTransitionTime":"2025-12-05T17:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.380298 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.380605 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.380681 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.380865 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.380981 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:40Z","lastTransitionTime":"2025-12-05T17:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.483717 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.484113 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.484476 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.484934 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.485093 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:40Z","lastTransitionTime":"2025-12-05T17:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.587700 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.587845 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.587863 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.587881 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.587892 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:40Z","lastTransitionTime":"2025-12-05T17:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.690589 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.691119 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.691138 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.691161 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.691174 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:40Z","lastTransitionTime":"2025-12-05T17:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.795442 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.795508 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.795520 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.795540 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.795555 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:40Z","lastTransitionTime":"2025-12-05T17:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.863266 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:34:40 crc kubenswrapper[4961]: E1205 17:34:40.863754 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.863435 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:40 crc kubenswrapper[4961]: E1205 17:34:40.864054 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.863266 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:34:40 crc kubenswrapper[4961]: E1205 17:34:40.864250 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.897616 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.897657 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.897669 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.897685 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:40 crc kubenswrapper[4961]: I1205 17:34:40.897697 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:40Z","lastTransitionTime":"2025-12-05T17:34:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.000686 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.000721 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.000733 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.000751 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.000763 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:41Z","lastTransitionTime":"2025-12-05T17:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.103839 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.103908 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.103933 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.103966 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.103992 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:41Z","lastTransitionTime":"2025-12-05T17:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.208044 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.208109 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.208126 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.208146 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.208175 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:41Z","lastTransitionTime":"2025-12-05T17:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.311464 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.311552 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.311585 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.311620 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.311642 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:41Z","lastTransitionTime":"2025-12-05T17:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.415469 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.415554 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.415580 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.415615 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.415640 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:41Z","lastTransitionTime":"2025-12-05T17:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.517843 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.517903 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.517918 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.517941 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.517958 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:41Z","lastTransitionTime":"2025-12-05T17:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.620350 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.620396 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.620407 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.620424 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.620435 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:41Z","lastTransitionTime":"2025-12-05T17:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.723079 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.723127 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.723137 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.723155 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.723166 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:41Z","lastTransitionTime":"2025-12-05T17:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.825677 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.825717 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.825725 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.825740 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.825752 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:41Z","lastTransitionTime":"2025-12-05T17:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.863445 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:34:41 crc kubenswrapper[4961]: E1205 17:34:41.863623 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.929038 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.929080 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.929092 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.929111 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:41 crc kubenswrapper[4961]: I1205 17:34:41.929124 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:41Z","lastTransitionTime":"2025-12-05T17:34:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.032584 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.032636 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.032649 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.032679 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.032693 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:42Z","lastTransitionTime":"2025-12-05T17:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.136001 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.136095 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.136105 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.136121 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.136133 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:42Z","lastTransitionTime":"2025-12-05T17:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.238593 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.238638 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.238648 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.238663 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.238675 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:42Z","lastTransitionTime":"2025-12-05T17:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.341501 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.341585 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.341599 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.341628 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.341643 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:42Z","lastTransitionTime":"2025-12-05T17:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.444757 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.444826 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.444836 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.444855 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.444868 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:42Z","lastTransitionTime":"2025-12-05T17:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.547204 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.547259 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.547271 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.547293 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.547307 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:42Z","lastTransitionTime":"2025-12-05T17:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.650160 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.650217 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.650226 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.650245 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.650256 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:42Z","lastTransitionTime":"2025-12-05T17:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.693491 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.693540 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.693556 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.693579 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.693595 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:42Z","lastTransitionTime":"2025-12-05T17:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:42 crc kubenswrapper[4961]: E1205 17:34:42.710603 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:42Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.715679 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.715752 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.715818 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.715854 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.715878 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:42Z","lastTransitionTime":"2025-12-05T17:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:42 crc kubenswrapper[4961]: E1205 17:34:42.730602 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:42Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.734956 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.735009 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.735023 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.735041 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.735055 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:42Z","lastTransitionTime":"2025-12-05T17:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:42 crc kubenswrapper[4961]: E1205 17:34:42.749450 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:42Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.754980 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.755027 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.755037 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.755056 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.755072 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:42Z","lastTransitionTime":"2025-12-05T17:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:42 crc kubenswrapper[4961]: E1205 17:34:42.776823 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:42Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.783384 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.783453 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.783471 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.783498 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.783516 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:42Z","lastTransitionTime":"2025-12-05T17:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:42 crc kubenswrapper[4961]: E1205 17:34:42.798341 4961 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"f7e99468-3bda-480f-aff7-5c637658e842\\\",\\\"systemUUID\\\":\\\"76caf0b9-12fa-49d9-8944-44d70ddec643\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:42Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:42 crc kubenswrapper[4961]: E1205 17:34:42.798512 4961 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.800237 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.800264 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.800274 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.800291 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.800301 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:42Z","lastTransitionTime":"2025-12-05T17:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.863185 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.863266 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.863424 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:42 crc kubenswrapper[4961]: E1205 17:34:42.863543 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:34:42 crc kubenswrapper[4961]: E1205 17:34:42.863663 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:34:42 crc kubenswrapper[4961]: E1205 17:34:42.863883 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.902841 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.902927 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.902942 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.902960 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:42 crc kubenswrapper[4961]: I1205 17:34:42.902973 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:42Z","lastTransitionTime":"2025-12-05T17:34:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.005583 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.005623 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.005633 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.005651 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.005663 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:43Z","lastTransitionTime":"2025-12-05T17:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.108963 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.109016 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.109029 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.109058 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.109071 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:43Z","lastTransitionTime":"2025-12-05T17:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.212446 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.212528 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.212545 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.212570 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.212586 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:43Z","lastTransitionTime":"2025-12-05T17:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.317707 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.317763 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.317792 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.317815 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.317827 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:43Z","lastTransitionTime":"2025-12-05T17:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.424745 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.424848 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.424863 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.424884 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.424904 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:43Z","lastTransitionTime":"2025-12-05T17:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.527400 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.527712 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.527840 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.527950 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.528044 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:43Z","lastTransitionTime":"2025-12-05T17:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.631267 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.631328 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.631341 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.631363 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.631377 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:43Z","lastTransitionTime":"2025-12-05T17:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.734394 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.734454 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.734464 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.734479 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.734491 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:43Z","lastTransitionTime":"2025-12-05T17:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.837119 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.837179 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.837190 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.837212 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.837224 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:43Z","lastTransitionTime":"2025-12-05T17:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.862991 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:34:43 crc kubenswrapper[4961]: E1205 17:34:43.863336 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.940685 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.940742 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.940753 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.940787 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:43 crc kubenswrapper[4961]: I1205 17:34:43.940802 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:43Z","lastTransitionTime":"2025-12-05T17:34:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.043972 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.044036 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.044048 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.044070 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.044084 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:44Z","lastTransitionTime":"2025-12-05T17:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.147505 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.147562 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.147579 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.147603 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.147619 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:44Z","lastTransitionTime":"2025-12-05T17:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.250413 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.250572 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.250583 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.250603 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.250617 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:44Z","lastTransitionTime":"2025-12-05T17:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.353373 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.353440 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.353455 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.353475 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.353487 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:44Z","lastTransitionTime":"2025-12-05T17:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.456189 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.456236 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.456248 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.456264 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.456275 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:44Z","lastTransitionTime":"2025-12-05T17:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.559233 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.559287 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.559302 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.559320 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.559336 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:44Z","lastTransitionTime":"2025-12-05T17:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.661802 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.661913 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.661929 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.661948 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.661977 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:44Z","lastTransitionTime":"2025-12-05T17:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.765222 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.765288 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.765307 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.765333 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.765352 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:44Z","lastTransitionTime":"2025-12-05T17:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.862585 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.862634 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:34:44 crc kubenswrapper[4961]: E1205 17:34:44.862744 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.862888 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:44 crc kubenswrapper[4961]: E1205 17:34:44.863062 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:34:44 crc kubenswrapper[4961]: E1205 17:34:44.863109 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.867855 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.867957 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.867979 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.868008 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.868026 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:44Z","lastTransitionTime":"2025-12-05T17:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.875827 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-pgc6p" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"6f24429b-a57e-47d0-8354-87ff9d6bcee8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:50Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wz4w8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:50Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-pgc6p\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.880884 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.894771 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.907857 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pbqp7" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d30454a9-4134-4c4e-a5e6-098cf956a769\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52f98e6ea652c2fe497b263c05c3a7d19d3de27af51a5533cb6bf82f369209c9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-t82bj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pbqp7\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2025-12-05T17:34:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.924405 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d176b9c3-755b-47b4-a7dd-a709873feaa8\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:48Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\
\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"\\\\\\\" limit=400\\\\nI1205 17:33:34.586841 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI1205 17:33:34.668411 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nW1205 17:33:34.668478 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668486 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW1205 17:33:34.668494 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW1205 17:33:34.668498 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW1205 17:33:34.668501 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW1205 17:33:34.668505 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nI1205 17:33:34.668623 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nI1205 17:33:34.672167 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672208 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1205 17:33:34.672256 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672265 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1205 17:33:34.672279 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1205 17:33:34.672284 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nF1205 17:33:34.672422 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:20Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.939202 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.959819 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://61a6fb1bcb0acfce0f7843b954ebf3a01097b01afdcfc9df841abeb3e4fe49b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.970885 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.970935 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.970946 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.970964 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.970978 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:44Z","lastTransitionTime":"2025-12-05T17:34:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.974899 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c048c267-061b-479b-9d63-b3aee093d9f6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://44aa860de8961d6b96e08338c9e20b509ef1dbddad2f4e40fff6bf124b5ad43d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-crm8x\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-4vc27\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:44 crc kubenswrapper[4961]: I1205 17:34:44.998066 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f64daea3-7a90-4012-bd0c-31b137bd1cae\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kuber
netes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\\\",\\\"image
\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:34:38Z\\\",\\\"message\\\":\\\"n\\\\nI1205 17:34:37.857440 7012 loadbalancer.go:304] Deleted 0 stale LBs for map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-authentication-operator/metrics\\\\\\\"}\\\\nI1205 17:34:37.856263 7012 ovn.go:134] Ensuring zone local for Pod openshift-network-operator/network-operator-58b4c7f79c-55gtf in node crc\\\\nI1205 17:34:37.857362 7012 transact.go:42] Configuring OVN: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:04 10.217.0.4]} options:{GoMap:map[iface-id-ver:3b6479f0-333b-4a96-9adf-2099afdc2447 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:04 10.217.0.4]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {61897e97-c771-4738-8709-09636387cb00}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Logical_Switch Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {7e8bb06a-06a5-45bc-a752-26a17d322811}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:Port_Group Row:map[] Rows:[] Columns:[] Mutations:[{Column:ports Mutator:insert Value:{GoSet:[{GoUUID:61897e97-c771-4738-8709-09636387cb00}]}}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == 
{c02bd945-d57b-49ff-9cd3-202ed3574\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:34:36Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5w9vd_openshift-ovn-kubernetes(f64daea3-7a90-4012-bd0c-31b137bd1cae)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"
readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-pr468\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-5w9vd\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:44Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.010350 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d638f18e-c7e2-48c2-b4aa-972c346b89dc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:19Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://967aa7122fc74e8e86f3b3c765c5a0e58363246f6d92e15624c7fdde25fa8520\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://9d5acf2a4e31671e482dff718f50ebbb3ef0c81e57efbdc414fc26bb8135958f\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5acdded4bc4cee8d55747a3e955461a3f1bcbe85878c00be04dfa4f5876c7863\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.023889 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"daf1b82d-41cc-464f-b868-78a2929f63e1\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://3bbda92a962aed8a27fbb81050c0587889e326b89bca4fe276dd1754a4ee32e0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://51ceaa833433903bbf371fffcd7e3d0dcb3d74fe2497134dd43e18570b8e9c77\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4
.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://4573e4acbc0645239c97674ae61b4f2d952fcd5b0929f51b8f952b97b1de1eb6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://386664d93709083a63520c993930de5794f10edb977c945fafa2093e4e1e451d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://386664d93709083a63520c993930de5794f10edb977c945fafa2093e4e1e451d\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:17Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:15Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.037071 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready 
status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:34Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.054004 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://500825fe9883c9df8faa80bae8d05b02fc9ce2cdabf7eafe8ef2392c7cfdc37f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8346ab61890435cfe765c6a7c374d756ccc9aba06dd87588654f3618fdc5de68\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:35Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.070149 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2068823e9db53d4e324e481258980ee6a2dd2644ebf5ac1edea90876d59aeea\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.073811 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.073881 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.073899 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.073923 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.073939 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:45Z","lastTransitionTime":"2025-12-05T17:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.090193 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-sxfzb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d34d6a8d-1b83-4af1-afd3-76ba46d02e3b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f2f0ebc0d917781b93e3848439cdfeba56b89f9716773c0dbd68ca5aeb866bf2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b0f847259b376cbde321b4023cb39279d84ce20cffb7048fecf4b76382de822\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://512b3fbcc4b1186bade60ba16e7d5e04a5b2f9f28d75f16a35a3d7e4f03fd32b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:39Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4414df37cd511d7a9215d7423f7146fc401b00474c69858dc1633fd781b6b604\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://df1fc644d4fca96f28d655046d94a8bdc2674cf91f58d58768c071015f99a6f4\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e483ba05accdf27d837de8060ee6061a731b3538d27533c9f55cf43cda66f0ce\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f6259ab133eb86da0bfc21664a185088c2fac4e976d022e91c406b71e6627b34\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-12-05T17:33:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-nvkk4\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-sxfzb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.103371 4961 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-bgtgs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"26618630-1782-4ae8-af12-6f913fbddf5b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:34:25Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://af8e46afa0972933bb17315bc7de0592287240851f5178b7533fd918320062ab\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-12-05T17:34:23Z\\\",\\\"message\\\":\\\"2025-12-05T17:33:38+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_4205d582-be47-42de-92da-c3b1c4e06d40\\\\n2025-12-05T17:33:38+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_4205d582-be47-42de-92da-c3b1c4e06d40 to /host/opt/cni/bin/\\\\n2025-12-05T17:33:38Z [verbose] multus-daemon started\\\\n2025-12-05T17:33:38Z [verbose] Readiness Indicator file check\\\\n2025-12-05T17:34:23Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-12-05T17:33:37Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:34:24Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-2l2tj\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-bgtgs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.113798 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qc49n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"90ca8c0d-e913-462b-9366-b6609477abb1\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:41Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1178272d0411b920546def1fa7f1033a9a328306a0245f9b30a884a4140108b2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cscxd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:40Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qc49n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:45Z is after 2025-08-24T17:21:41Z" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.126507 4961 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"31e6cede-369d-4288-b388-c28aae76a50b\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-12-05T17:33:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://1cd217f59fdbb11c5b8edec0f4f4a4930e22c419cc8a9048443a711641b50445\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://809381123c6003acbaac767394ebee9da31e022a661eaf3e1b601a7192664d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-12-05T17:33:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8zj55\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-12-05T17:33:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-fdb4n\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-12-05T17:34:45Z is after 2025-08-24T17:21:41Z" Dec 05 
17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.181262 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.181329 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.181354 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.181386 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.181402 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:45Z","lastTransitionTime":"2025-12-05T17:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.285947 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.286006 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.286017 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.286034 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.286046 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:45Z","lastTransitionTime":"2025-12-05T17:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.388196 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.388248 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.388258 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.388275 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.388287 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:45Z","lastTransitionTime":"2025-12-05T17:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.491410 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.491470 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.491488 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.491511 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.491527 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:45Z","lastTransitionTime":"2025-12-05T17:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.594006 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.594058 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.594069 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.594088 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.594099 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:45Z","lastTransitionTime":"2025-12-05T17:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.696136 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.696485 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.696558 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.696628 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.696697 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:45Z","lastTransitionTime":"2025-12-05T17:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.799414 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.799457 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.799467 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.799482 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.799493 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:45Z","lastTransitionTime":"2025-12-05T17:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.863099 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:34:45 crc kubenswrapper[4961]: E1205 17:34:45.863284 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.902396 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.902737 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.902866 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.902988 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:45 crc kubenswrapper[4961]: I1205 17:34:45.903087 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:45Z","lastTransitionTime":"2025-12-05T17:34:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.007073 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.007127 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.007141 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.007162 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.007176 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:46Z","lastTransitionTime":"2025-12-05T17:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.109942 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.109977 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.109986 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.110002 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.110013 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:46Z","lastTransitionTime":"2025-12-05T17:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.213316 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.213389 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.213400 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.213421 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.213433 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:46Z","lastTransitionTime":"2025-12-05T17:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.317240 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.317294 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.317308 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.317327 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.317341 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:46Z","lastTransitionTime":"2025-12-05T17:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.420578 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.420647 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.420662 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.420684 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.420702 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:46Z","lastTransitionTime":"2025-12-05T17:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.523715 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.523770 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.523790 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.523804 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.523815 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:46Z","lastTransitionTime":"2025-12-05T17:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.627872 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.627914 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.627924 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.627941 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.627954 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:46Z","lastTransitionTime":"2025-12-05T17:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.732137 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.732218 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.732230 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.732252 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.732268 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:46Z","lastTransitionTime":"2025-12-05T17:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.835262 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.835324 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.835349 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.835376 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.835395 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:46Z","lastTransitionTime":"2025-12-05T17:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.863152 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.863203 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:34:46 crc kubenswrapper[4961]: E1205 17:34:46.863312 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.863444 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:46 crc kubenswrapper[4961]: E1205 17:34:46.863821 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:34:46 crc kubenswrapper[4961]: E1205 17:34:46.863839 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.938267 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.938324 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.938334 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.938355 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:46 crc kubenswrapper[4961]: I1205 17:34:46.938371 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:46Z","lastTransitionTime":"2025-12-05T17:34:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.040979 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.041051 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.041066 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.041092 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.041119 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:47Z","lastTransitionTime":"2025-12-05T17:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.143477 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.143522 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.143536 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.143556 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.143573 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:47Z","lastTransitionTime":"2025-12-05T17:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.246335 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.247024 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.247043 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.247074 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.247089 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:47Z","lastTransitionTime":"2025-12-05T17:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.349988 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.350057 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.350073 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.350099 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.350113 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:47Z","lastTransitionTime":"2025-12-05T17:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.453544 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.453588 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.453603 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.453630 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.453663 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:47Z","lastTransitionTime":"2025-12-05T17:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.556699 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.556752 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.556762 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.556799 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.556818 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:47Z","lastTransitionTime":"2025-12-05T17:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.658909 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.658991 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.659017 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.659052 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.659078 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:47Z","lastTransitionTime":"2025-12-05T17:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.762100 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.762145 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.762158 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.762178 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.762190 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:47Z","lastTransitionTime":"2025-12-05T17:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.862999 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:34:47 crc kubenswrapper[4961]: E1205 17:34:47.863523 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.865228 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.865311 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.865326 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.865346 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.865360 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:47Z","lastTransitionTime":"2025-12-05T17:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.968072 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.968132 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.968146 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.968169 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:47 crc kubenswrapper[4961]: I1205 17:34:47.968183 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:47Z","lastTransitionTime":"2025-12-05T17:34:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.071355 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.071434 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.071469 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.071496 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.071511 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:48Z","lastTransitionTime":"2025-12-05T17:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.174684 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.174735 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.174744 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.174767 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.174795 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:48Z","lastTransitionTime":"2025-12-05T17:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.278824 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.278890 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.278904 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.278926 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.278941 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:48Z","lastTransitionTime":"2025-12-05T17:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.381983 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.382056 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.382077 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.382103 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.382117 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:48Z","lastTransitionTime":"2025-12-05T17:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.485632 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.485692 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.485705 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.485722 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.485735 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:48Z","lastTransitionTime":"2025-12-05T17:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.588331 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.588383 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.588393 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.588410 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.588421 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:48Z","lastTransitionTime":"2025-12-05T17:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.691046 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.691101 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.691114 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.691131 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.691143 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:48Z","lastTransitionTime":"2025-12-05T17:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.794343 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.794380 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.794389 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.794404 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.794414 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:48Z","lastTransitionTime":"2025-12-05T17:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.863499 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.863522 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:34:48 crc kubenswrapper[4961]: E1205 17:34:48.863705 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.863727 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:48 crc kubenswrapper[4961]: E1205 17:34:48.863830 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:34:48 crc kubenswrapper[4961]: E1205 17:34:48.863934 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.896247 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.896320 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.896337 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.896366 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.896385 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:48Z","lastTransitionTime":"2025-12-05T17:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.999690 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:48 crc kubenswrapper[4961]: I1205 17:34:48.999755 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:48.999812 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:48.999841 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:48.999858 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:48Z","lastTransitionTime":"2025-12-05T17:34:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.102640 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.102721 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.102756 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.102834 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.102867 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:49Z","lastTransitionTime":"2025-12-05T17:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.205945 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.205998 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.206007 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.206024 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.206036 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:49Z","lastTransitionTime":"2025-12-05T17:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.308918 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.308978 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.308995 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.309019 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.309035 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:49Z","lastTransitionTime":"2025-12-05T17:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.411647 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.411698 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.411710 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.411729 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.411740 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:49Z","lastTransitionTime":"2025-12-05T17:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.515592 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.515902 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.515925 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.515947 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.515960 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:49Z","lastTransitionTime":"2025-12-05T17:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.619155 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.619236 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.619270 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.619306 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.619327 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:49Z","lastTransitionTime":"2025-12-05T17:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.722130 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.722177 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.722190 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.722209 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.722222 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:49Z","lastTransitionTime":"2025-12-05T17:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.825215 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.825268 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.825277 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.825296 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.825307 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:49Z","lastTransitionTime":"2025-12-05T17:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.863320 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:34:49 crc kubenswrapper[4961]: E1205 17:34:49.863519 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.877083 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.928099 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.928142 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.928151 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.928167 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:49 crc kubenswrapper[4961]: I1205 17:34:49.928178 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:49Z","lastTransitionTime":"2025-12-05T17:34:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.030898 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.031444 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.031667 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.031915 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.032141 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:50Z","lastTransitionTime":"2025-12-05T17:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.135759 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.135824 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.135836 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.135855 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.135868 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:50Z","lastTransitionTime":"2025-12-05T17:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.239333 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.239374 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.239386 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.239401 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.239413 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:50Z","lastTransitionTime":"2025-12-05T17:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.342259 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.342305 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.342316 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.342359 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.342371 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:50Z","lastTransitionTime":"2025-12-05T17:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.446552 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.446634 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.446648 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.446674 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.446691 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:50Z","lastTransitionTime":"2025-12-05T17:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.549411 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.549461 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.549477 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.549495 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.549508 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:50Z","lastTransitionTime":"2025-12-05T17:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.652121 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.652169 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.652178 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.652194 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.652203 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:50Z","lastTransitionTime":"2025-12-05T17:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.755222 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.755294 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.755316 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.755346 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.755366 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:50Z","lastTransitionTime":"2025-12-05T17:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.858984 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.859042 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.859054 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.859071 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.859081 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:50Z","lastTransitionTime":"2025-12-05T17:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.863411 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.863530 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.863416 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:34:50 crc kubenswrapper[4961]: E1205 17:34:50.863589 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:34:50 crc kubenswrapper[4961]: E1205 17:34:50.863667 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:34:50 crc kubenswrapper[4961]: E1205 17:34:50.863743 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.961710 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.961801 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.961820 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.961840 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:50 crc kubenswrapper[4961]: I1205 17:34:50.961856 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:50Z","lastTransitionTime":"2025-12-05T17:34:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.065330 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.065388 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.065405 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.065427 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.065444 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:51Z","lastTransitionTime":"2025-12-05T17:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.169026 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.169073 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.169082 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.169100 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.169113 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:51Z","lastTransitionTime":"2025-12-05T17:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.271838 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.271893 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.271908 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.271938 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.271959 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:51Z","lastTransitionTime":"2025-12-05T17:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.374935 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.374993 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.375007 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.375047 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.375059 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:51Z","lastTransitionTime":"2025-12-05T17:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.478222 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.478318 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.478347 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.478396 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.478415 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:51Z","lastTransitionTime":"2025-12-05T17:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.581030 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.581095 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.581104 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.581123 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.581133 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:51Z","lastTransitionTime":"2025-12-05T17:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.682810 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.682878 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.682889 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.682906 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.682916 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:51Z","lastTransitionTime":"2025-12-05T17:34:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
[Dec 05 17:34:51, 17:34:51.785: one more node status heartbeat of the same form]
Dec 05 17:34:51 crc kubenswrapper[4961]: I1205 17:34:51.863437 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p"
Dec 05 17:34:51 crc kubenswrapper[4961]: E1205 17:34:51.863724 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8"
[Dec 05 17:34:51 - 17:34:52, 17:34:51.889 - 17:34:52.816: node status heartbeats of the same form repeated 10 more times]
Dec 05 17:34:52 crc kubenswrapper[4961]: I1205 17:34:52.863379 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Dec 05 17:34:52 crc kubenswrapper[4961]: E1205 17:34:52.863609 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Dec 05 17:34:52 crc kubenswrapper[4961]: I1205 17:34:52.864068 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Dec 05 17:34:52 crc kubenswrapper[4961]: I1205 17:34:52.864100 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Dec 05 17:34:52 crc kubenswrapper[4961]: E1205 17:34:52.864909 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Dec 05 17:34:52 crc kubenswrapper[4961]: E1205 17:34:52.865113 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
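Every sync error above traces back to the same condition: the kubelet found no network config under /etc/kubernetes/cni/net.d/, a file normally dropped in by the network plugin (here OVN-Kubernetes, per the ovnkube-node pod below) once it is running. A small illustrative check, not part of the log, of roughly what the kubelet's NetworkReady probe is complaining about; the directory comes from the log, and the parsing assumes the standard CNI .conf/.conflist JSON layout:

# Sketch: check whether a CNI network config is present, roughly what
# the kubelet's NetworkReady condition above is testing.
import glob, json, os

CNI_DIR = "/etc/kubernetes/cni/net.d"   # directory from the log

confs = sorted(glob.glob(os.path.join(CNI_DIR, "*.conf"))
               + glob.glob(os.path.join(CNI_DIR, "*.conflist")))
if not confs:
    print(f"no CNI configuration file in {CNI_DIR} -- NetworkReady stays false")
for path in confs:
    with open(path) as f:
        cfg = json.load(f)
    # A .conflist carries a "plugins" array; a single .conf has "type" at top level.
    plugins = cfg.get("plugins", [cfg])
    print(path, "->", [p.get("type") for p in plugins])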
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:34:52 crc kubenswrapper[4961]: I1205 17:34:52.866154 4961 scope.go:117] "RemoveContainer" containerID="8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02" Dec 05 17:34:52 crc kubenswrapper[4961]: E1205 17:34:52.866582 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5w9vd_openshift-ovn-kubernetes(f64daea3-7a90-4012-bd0c-31b137bd1cae)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" Dec 05 17:34:52 crc kubenswrapper[4961]: I1205 17:34:52.919072 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:52 crc kubenswrapper[4961]: I1205 17:34:52.919114 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:52 crc kubenswrapper[4961]: I1205 17:34:52.919126 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:52 crc kubenswrapper[4961]: I1205 17:34:52.919140 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:52 crc kubenswrapper[4961]: I1205 17:34:52.919150 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:52Z","lastTransitionTime":"2025-12-05T17:34:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:52 crc kubenswrapper[4961]: I1205 17:34:52.974148 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Dec 05 17:34:52 crc kubenswrapper[4961]: I1205 17:34:52.974191 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Dec 05 17:34:52 crc kubenswrapper[4961]: I1205 17:34:52.974203 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Dec 05 17:34:52 crc kubenswrapper[4961]: I1205 17:34:52.974221 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Dec 05 17:34:52 crc kubenswrapper[4961]: I1205 17:34:52.974232 4961 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-12-05T17:34:52Z","lastTransitionTime":"2025-12-05T17:34:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Dec 05 17:34:53 crc kubenswrapper[4961]: I1205 17:34:53.023188 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-f5rb8"] Dec 05 17:34:53 crc kubenswrapper[4961]: I1205 17:34:53.023868 4961 util.go:30] "No sandbox for pod can be found. 
Dec 05 17:34:53 crc kubenswrapper[4961]: I1205 17:34:53.026516 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt"
Dec 05 17:34:53 crc kubenswrapper[4961]: I1205 17:34:53.026591 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Dec 05 17:34:53 crc kubenswrapper[4961]: I1205 17:34:53.026738 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Dec 05 17:34:53 crc kubenswrapper[4961]: I1205 17:34:53.026749 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert"
[Dec 05 17:34:53, 17:34:53.106 - 17:34:53.231, volume handling for pod cluster-version-operator-5c965bbfc6-f5rb8 (UID c5c8cf9a-4e44-4b68-aa82-717a7a925421): reconciler_common.go:245 operationExecutor.VerifyControllerAttachedVolume started, reconciler_common.go:218 operationExecutor.MountVolume started, and operation_generator.go:637 MountVolume.SetUp succeeded, for volumes kube-api-access (projected), serving-cert (secret), etc-ssl-certs (host-path), service-ca (configmap) and etc-cvo-updatepayloads (host-path)]
[Dec 05 17:34:53, 17:34:53.069 - 17:34:53.368, pod_startup_latency_tracker.go:104 "Observed pod startup duration" (podStartSLOduration = podStartE2EDuration): openshift-multus/multus-additional-cni-plugins-sxfzb 1m17s, openshift-multus/multus-bgtgs 1m17s, openshift-image-registry/node-ca-qc49n 1m17s, openshift-machine-config-operator/kube-rbac-proxy-crio-crc 4.14s, openshift-etcd/etcd-crc 9.14s, openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-fdb4n 1m17s, openshift-dns/node-resolver-pbqp7 1m17s, openshift-kube-apiserver/kube-apiserver-crc 1m18s, openshift-kube-controller-manager/kube-controller-manager-crc 1m13s, openshift-kube-scheduler/openshift-kube-scheduler-crc 46.3s, openshift-machine-config-operator/machine-config-daemon-4vc27 1m17s]
Dec 05 17:34:53 crc kubenswrapper[4961]: I1205 17:34:53.341347 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-f5rb8"
Dec 05 17:34:53 crc kubenswrapper[4961]: I1205 17:34:53.726326 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-f5rb8" event={"ID":"c5c8cf9a-4e44-4b68-aa82-717a7a925421","Type":"ContainerStarted","Data":"e82ff1ab97d7e0ba449e5688c7295efa97e6802fa8bbaaaccb8229ed492cbf3f"}
Dec 05 17:34:53 crc kubenswrapper[4961]: I1205 17:34:53.726398 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-f5rb8" event={"ID":"c5c8cf9a-4e44-4b68-aa82-717a7a925421","Type":"ContainerStarted","Data":"a9da01e10b9093da5432f991ccb0ded5e8bb4ea76188a60e606ae8ae3028270a"}
Dec 05 17:34:54 crc kubenswrapper[4961]: I1205 17:34:54.726415 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs\") pod \"network-metrics-daemon-pgc6p\" (UID: \"6f24429b-a57e-47d0-8354-87ff9d6bcee8\") " pod="openshift-multus/network-metrics-daemon-pgc6p"
Dec 05 17:34:54 crc kubenswrapper[4961]: E1205 17:34:54.726586 4961 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Dec 05 17:34:54 crc kubenswrapper[4961]: E1205 17:34:54.726647 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs podName:6f24429b-a57e-47d0-8354-87ff9d6bcee8 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:58.726629597 +0000 UTC m=+164.787780090 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs") pod "network-metrics-daemon-pgc6p" (UID: "6f24429b-a57e-47d0-8354-87ff9d6bcee8") : object "openshift-multus"/"metrics-daemon-secret" not registered
[Dec 05 17:34:53 - 17:34:58, 17:34:53.863 - 17:34:58.864: util.go:30 "No sandbox for pod can be found. Need to start a new one" followed by pod_workers.go:1301 "Error syncing pod, skipping" (network is not ready: no CNI configuration file in /etc/kubernetes/cni/net.d/) repeated roughly every second for pods openshift-multus/network-metrics-daemon-pgc6p (podUID 6f24429b-a57e-47d0-8354-87ff9d6bcee8), openshift-network-console/networking-console-plugin-85b44fc459-gdk6g (5fe485a1-e14f-4c09-b5b9-f252bc42b7e8), openshift-network-diagnostics/network-check-source-55646444c4-trplf (9d751cbb-f2e2-430d-9754-c882a5e924a5) and openshift-network-diagnostics/network-check-target-xd92c (3b6479f0-333b-4a96-9adf-2099afdc2447)]
Dec 05 17:34:58 crc kubenswrapper[4961]: I1205 17:34:58.864496 4961 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:34:58 crc kubenswrapper[4961]: E1205 17:34:58.864639 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:34:58 crc kubenswrapper[4961]: E1205 17:34:58.864793 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:34:59 crc kubenswrapper[4961]: I1205 17:34:59.863223 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:34:59 crc kubenswrapper[4961]: E1205 17:34:59.863416 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:35:00 crc kubenswrapper[4961]: I1205 17:35:00.862939 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:35:00 crc kubenswrapper[4961]: I1205 17:35:00.863061 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:35:00 crc kubenswrapper[4961]: E1205 17:35:00.863278 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:35:00 crc kubenswrapper[4961]: I1205 17:35:00.863320 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:35:00 crc kubenswrapper[4961]: E1205 17:35:00.863591 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:35:00 crc kubenswrapper[4961]: E1205 17:35:00.863669 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:35:01 crc kubenswrapper[4961]: I1205 17:35:01.862893 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:35:01 crc kubenswrapper[4961]: E1205 17:35:01.863176 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:35:02 crc kubenswrapper[4961]: I1205 17:35:02.862737 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:35:02 crc kubenswrapper[4961]: I1205 17:35:02.862834 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:35:02 crc kubenswrapper[4961]: E1205 17:35:02.862924 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:35:02 crc kubenswrapper[4961]: E1205 17:35:02.863005 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:35:02 crc kubenswrapper[4961]: I1205 17:35:02.863139 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:35:02 crc kubenswrapper[4961]: E1205 17:35:02.863282 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:35:03 crc kubenswrapper[4961]: I1205 17:35:03.863370 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:35:03 crc kubenswrapper[4961]: E1205 17:35:03.863728 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:35:04 crc kubenswrapper[4961]: I1205 17:35:04.863120 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:35:04 crc kubenswrapper[4961]: I1205 17:35:04.863169 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:35:04 crc kubenswrapper[4961]: I1205 17:35:04.863236 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:35:04 crc kubenswrapper[4961]: E1205 17:35:04.864210 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:35:04 crc kubenswrapper[4961]: E1205 17:35:04.864334 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:35:04 crc kubenswrapper[4961]: E1205 17:35:04.864453 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:35:05 crc kubenswrapper[4961]: I1205 17:35:05.862985 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:35:05 crc kubenswrapper[4961]: I1205 17:35:05.863795 4961 scope.go:117] "RemoveContainer" containerID="8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02" Dec 05 17:35:05 crc kubenswrapper[4961]: E1205 17:35:05.863996 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-5w9vd_openshift-ovn-kubernetes(f64daea3-7a90-4012-bd0c-31b137bd1cae)\"" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" Dec 05 17:35:05 crc kubenswrapper[4961]: E1205 17:35:05.864371 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:35:06 crc kubenswrapper[4961]: I1205 17:35:06.862802 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:35:06 crc kubenswrapper[4961]: I1205 17:35:06.862868 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:35:06 crc kubenswrapper[4961]: I1205 17:35:06.862801 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:35:06 crc kubenswrapper[4961]: E1205 17:35:06.862998 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:35:06 crc kubenswrapper[4961]: E1205 17:35:06.862942 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:35:06 crc kubenswrapper[4961]: E1205 17:35:06.863210 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:35:07 crc kubenswrapper[4961]: I1205 17:35:07.862822 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:35:07 crc kubenswrapper[4961]: E1205 17:35:07.863694 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:35:08 crc kubenswrapper[4961]: I1205 17:35:08.863074 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:35:08 crc kubenswrapper[4961]: I1205 17:35:08.863144 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:35:08 crc kubenswrapper[4961]: I1205 17:35:08.863194 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:35:08 crc kubenswrapper[4961]: E1205 17:35:08.864171 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:35:08 crc kubenswrapper[4961]: E1205 17:35:08.863914 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:35:08 crc kubenswrapper[4961]: E1205 17:35:08.864279 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:35:09 crc kubenswrapper[4961]: I1205 17:35:09.862944 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:35:09 crc kubenswrapper[4961]: E1205 17:35:09.863119 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:35:10 crc kubenswrapper[4961]: I1205 17:35:10.790325 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bgtgs_26618630-1782-4ae8-af12-6f913fbddf5b/kube-multus/1.log" Dec 05 17:35:10 crc kubenswrapper[4961]: I1205 17:35:10.790848 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bgtgs_26618630-1782-4ae8-af12-6f913fbddf5b/kube-multus/0.log" Dec 05 17:35:10 crc kubenswrapper[4961]: I1205 17:35:10.790912 4961 generic.go:334] "Generic (PLEG): container finished" podID="26618630-1782-4ae8-af12-6f913fbddf5b" containerID="af8e46afa0972933bb17315bc7de0592287240851f5178b7533fd918320062ab" exitCode=1 Dec 05 17:35:10 crc kubenswrapper[4961]: I1205 17:35:10.790955 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bgtgs" event={"ID":"26618630-1782-4ae8-af12-6f913fbddf5b","Type":"ContainerDied","Data":"af8e46afa0972933bb17315bc7de0592287240851f5178b7533fd918320062ab"} Dec 05 17:35:10 crc kubenswrapper[4961]: I1205 17:35:10.790998 4961 scope.go:117] "RemoveContainer" containerID="99bee0211636006024d5b6f27f0452039356a8e7b18da5e11b8ac9d4b52650d4" Dec 05 17:35:10 crc kubenswrapper[4961]: I1205 17:35:10.792539 4961 scope.go:117] "RemoveContainer" containerID="af8e46afa0972933bb17315bc7de0592287240851f5178b7533fd918320062ab" Dec 05 17:35:10 crc kubenswrapper[4961]: E1205 17:35:10.792995 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-bgtgs_openshift-multus(26618630-1782-4ae8-af12-6f913fbddf5b)\"" pod="openshift-multus/multus-bgtgs" podUID="26618630-1782-4ae8-af12-6f913fbddf5b" Dec 05 17:35:10 crc kubenswrapper[4961]: I1205 17:35:10.812608 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-f5rb8" podStartSLOduration=94.812589367 podStartE2EDuration="1m34.812589367s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:34:53.749908476 +0000 UTC m=+99.811058959" watchObservedRunningTime="2025-12-05 17:35:10.812589367 +0000 UTC m=+116.873739840" Dec 05 17:35:10 crc kubenswrapper[4961]: I1205 17:35:10.863047 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:35:10 crc kubenswrapper[4961]: I1205 17:35:10.863277 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:35:10 crc kubenswrapper[4961]: I1205 17:35:10.863320 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:35:10 crc kubenswrapper[4961]: E1205 17:35:10.863413 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:35:10 crc kubenswrapper[4961]: E1205 17:35:10.863498 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:35:10 crc kubenswrapper[4961]: E1205 17:35:10.863560 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:35:11 crc kubenswrapper[4961]: I1205 17:35:11.795649 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bgtgs_26618630-1782-4ae8-af12-6f913fbddf5b/kube-multus/1.log" Dec 05 17:35:11 crc kubenswrapper[4961]: I1205 17:35:11.863199 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:35:11 crc kubenswrapper[4961]: E1205 17:35:11.863351 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:35:12 crc kubenswrapper[4961]: I1205 17:35:12.862770 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:35:12 crc kubenswrapper[4961]: I1205 17:35:12.862884 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:35:12 crc kubenswrapper[4961]: E1205 17:35:12.862933 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:35:12 crc kubenswrapper[4961]: I1205 17:35:12.862956 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:35:12 crc kubenswrapper[4961]: E1205 17:35:12.863048 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:35:12 crc kubenswrapper[4961]: E1205 17:35:12.863139 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:35:13 crc kubenswrapper[4961]: I1205 17:35:13.863039 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:35:13 crc kubenswrapper[4961]: E1205 17:35:13.863455 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:35:14 crc kubenswrapper[4961]: E1205 17:35:14.809426 4961 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Dec 05 17:35:14 crc kubenswrapper[4961]: I1205 17:35:14.862921 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:35:14 crc kubenswrapper[4961]: I1205 17:35:14.863039 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:35:14 crc kubenswrapper[4961]: I1205 17:35:14.863039 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:35:14 crc kubenswrapper[4961]: E1205 17:35:14.866060 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:35:14 crc kubenswrapper[4961]: E1205 17:35:14.866202 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:35:14 crc kubenswrapper[4961]: E1205 17:35:14.866342 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:35:15 crc kubenswrapper[4961]: E1205 17:35:15.379220 4961 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 17:35:15 crc kubenswrapper[4961]: I1205 17:35:15.863247 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:35:15 crc kubenswrapper[4961]: E1205 17:35:15.863390 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:35:16 crc kubenswrapper[4961]: I1205 17:35:16.863406 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:35:16 crc kubenswrapper[4961]: E1205 17:35:16.863582 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:35:16 crc kubenswrapper[4961]: I1205 17:35:16.863433 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:35:16 crc kubenswrapper[4961]: I1205 17:35:16.863883 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:35:16 crc kubenswrapper[4961]: E1205 17:35:16.863933 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:35:16 crc kubenswrapper[4961]: E1205 17:35:16.864004 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:35:17 crc kubenswrapper[4961]: I1205 17:35:17.863203 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:35:17 crc kubenswrapper[4961]: E1205 17:35:17.863617 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:35:18 crc kubenswrapper[4961]: I1205 17:35:18.863137 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:35:18 crc kubenswrapper[4961]: I1205 17:35:18.863219 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:35:18 crc kubenswrapper[4961]: E1205 17:35:18.863295 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:35:18 crc kubenswrapper[4961]: E1205 17:35:18.863370 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:35:18 crc kubenswrapper[4961]: I1205 17:35:18.863466 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:35:18 crc kubenswrapper[4961]: E1205 17:35:18.863713 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:35:19 crc kubenswrapper[4961]: I1205 17:35:19.862865 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:35:19 crc kubenswrapper[4961]: E1205 17:35:19.863078 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:35:19 crc kubenswrapper[4961]: I1205 17:35:19.864063 4961 scope.go:117] "RemoveContainer" containerID="8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02" Dec 05 17:35:20 crc kubenswrapper[4961]: E1205 17:35:20.381113 4961 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 17:35:20 crc kubenswrapper[4961]: I1205 17:35:20.830449 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5w9vd_f64daea3-7a90-4012-bd0c-31b137bd1cae/ovnkube-controller/3.log" Dec 05 17:35:20 crc kubenswrapper[4961]: I1205 17:35:20.833680 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerStarted","Data":"11daeee141f685444018fda0cd1d816c8fd3cb2424c7e936f290b30fcd16f3d9"} Dec 05 17:35:20 crc kubenswrapper[4961]: I1205 17:35:20.834193 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:35:20 crc kubenswrapper[4961]: I1205 17:35:20.862666 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:35:20 crc kubenswrapper[4961]: I1205 17:35:20.862729 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:35:20 crc kubenswrapper[4961]: I1205 17:35:20.862825 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:35:20 crc kubenswrapper[4961]: E1205 17:35:20.862865 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:35:20 crc kubenswrapper[4961]: E1205 17:35:20.863014 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:35:20 crc kubenswrapper[4961]: E1205 17:35:20.863102 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:35:20 crc kubenswrapper[4961]: I1205 17:35:20.879054 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" podStartSLOduration=104.879022546 podStartE2EDuration="1m44.879022546s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:20.878328901 +0000 UTC m=+126.939479384" watchObservedRunningTime="2025-12-05 17:35:20.879022546 +0000 UTC m=+126.940173039" Dec 05 17:35:21 crc kubenswrapper[4961]: I1205 17:35:21.221166 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-pgc6p"] Dec 05 17:35:21 crc kubenswrapper[4961]: I1205 17:35:21.221309 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:35:21 crc kubenswrapper[4961]: E1205 17:35:21.221398 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:35:22 crc kubenswrapper[4961]: I1205 17:35:22.863047 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:35:22 crc kubenswrapper[4961]: I1205 17:35:22.863085 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:35:22 crc kubenswrapper[4961]: I1205 17:35:22.863140 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:35:22 crc kubenswrapper[4961]: E1205 17:35:22.863185 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:35:22 crc kubenswrapper[4961]: I1205 17:35:22.863209 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:35:22 crc kubenswrapper[4961]: E1205 17:35:22.863312 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:35:22 crc kubenswrapper[4961]: E1205 17:35:22.863403 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:35:22 crc kubenswrapper[4961]: E1205 17:35:22.863465 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:35:24 crc kubenswrapper[4961]: I1205 17:35:24.862518 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:35:24 crc kubenswrapper[4961]: I1205 17:35:24.862625 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:35:24 crc kubenswrapper[4961]: I1205 17:35:24.862625 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:35:24 crc kubenswrapper[4961]: E1205 17:35:24.863568 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:35:24 crc kubenswrapper[4961]: I1205 17:35:24.863594 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:35:24 crc kubenswrapper[4961]: E1205 17:35:24.863673 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:35:24 crc kubenswrapper[4961]: E1205 17:35:24.863753 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:35:24 crc kubenswrapper[4961]: E1205 17:35:24.863827 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:35:25 crc kubenswrapper[4961]: E1205 17:35:25.382658 4961 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Dec 05 17:35:25 crc kubenswrapper[4961]: I1205 17:35:25.863359 4961 scope.go:117] "RemoveContainer" containerID="af8e46afa0972933bb17315bc7de0592287240851f5178b7533fd918320062ab" Dec 05 17:35:26 crc kubenswrapper[4961]: I1205 17:35:26.855480 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bgtgs_26618630-1782-4ae8-af12-6f913fbddf5b/kube-multus/1.log" Dec 05 17:35:26 crc kubenswrapper[4961]: I1205 17:35:26.855957 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bgtgs" event={"ID":"26618630-1782-4ae8-af12-6f913fbddf5b","Type":"ContainerStarted","Data":"56a0e4e4ff839ae5900c84d69890f793ddb3bd84538df59669303c0bf98050f3"} Dec 05 17:35:26 crc kubenswrapper[4961]: I1205 17:35:26.865673 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:35:26 crc kubenswrapper[4961]: E1205 17:35:26.865790 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:35:26 crc kubenswrapper[4961]: I1205 17:35:26.865861 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:35:26 crc kubenswrapper[4961]: E1205 17:35:26.865917 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:35:26 crc kubenswrapper[4961]: I1205 17:35:26.865953 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:35:26 crc kubenswrapper[4961]: E1205 17:35:26.865994 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:35:26 crc kubenswrapper[4961]: I1205 17:35:26.866027 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:35:26 crc kubenswrapper[4961]: E1205 17:35:26.866081 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:35:28 crc kubenswrapper[4961]: I1205 17:35:28.863397 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:35:28 crc kubenswrapper[4961]: I1205 17:35:28.863509 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:35:28 crc kubenswrapper[4961]: I1205 17:35:28.863537 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:35:28 crc kubenswrapper[4961]: E1205 17:35:28.863660 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Dec 05 17:35:28 crc kubenswrapper[4961]: I1205 17:35:28.863712 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:35:28 crc kubenswrapper[4961]: E1205 17:35:28.864036 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-pgc6p" podUID="6f24429b-a57e-47d0-8354-87ff9d6bcee8" Dec 05 17:35:28 crc kubenswrapper[4961]: E1205 17:35:28.864179 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Dec 05 17:35:28 crc kubenswrapper[4961]: E1205 17:35:28.863918 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Dec 05 17:35:30 crc kubenswrapper[4961]: I1205 17:35:30.864002 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:35:30 crc kubenswrapper[4961]: I1205 17:35:30.864086 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:35:30 crc kubenswrapper[4961]: I1205 17:35:30.864262 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:35:30 crc kubenswrapper[4961]: I1205 17:35:30.864045 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:35:30 crc kubenswrapper[4961]: I1205 17:35:30.867769 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 05 17:35:30 crc kubenswrapper[4961]: I1205 17:35:30.868643 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 05 17:35:30 crc kubenswrapper[4961]: I1205 17:35:30.868657 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 05 17:35:30 crc kubenswrapper[4961]: I1205 17:35:30.868954 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 05 17:35:30 crc kubenswrapper[4961]: I1205 17:35:30.869093 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 05 17:35:30 crc kubenswrapper[4961]: I1205 17:35:30.869253 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.670108 4961 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.727163 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.727754 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-6px8t"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.728383 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-6px8t" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.728567 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-cc7mk"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.729015 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.729228 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.732456 4961 reflector.go:561] object-"openshift-route-controller-manager"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-route-controller-manager": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.732526 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-route-controller-manager\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-route-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.732573 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.733252 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-f9gdg"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.733749 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nx5xr"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.733828 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.734099 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nx5xr" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.734397 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.740406 4961 reflector.go:561] object-"openshift-route-controller-manager"/"client-ca": failed to list *v1.ConfigMap: configmaps "client-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-route-controller-manager": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.740466 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-route-controller-manager\"/\"client-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"client-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-route-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.740628 4961 reflector.go:561] object-"openshift-machine-api"/"kube-rbac-proxy": failed to list *v1.ConfigMap: configmaps "kube-rbac-proxy" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.740656 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"kube-rbac-proxy\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-rbac-proxy\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.740734 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-cxwzt"] Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.740766 4961 reflector.go:561] object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2": failed to list *v1.Secret: secrets "route-controller-manager-sa-dockercfg-h2zr2" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-route-controller-manager": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.740831 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-route-controller-manager\"/\"route-controller-manager-sa-dockercfg-h2zr2\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"route-controller-manager-sa-dockercfg-h2zr2\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-route-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.741098 4961 reflector.go:561] object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7": failed to list *v1.Secret: secrets "machine-api-operator-dockercfg-mfbb7" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.741126 4961 reflector.go:158] "Unhandled Error" 
err="object-\"openshift-machine-api\"/\"machine-api-operator-dockercfg-mfbb7\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-api-operator-dockercfg-mfbb7\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.741403 4961 reflector.go:561] object-"openshift-route-controller-manager"/"config": failed to list *v1.ConfigMap: configmaps "config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-route-controller-manager": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.741445 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-route-controller-manager\"/\"config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-route-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.742149 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.744477 4961 reflector.go:561] object-"openshift-machine-api"/"machine-api-operator-tls": failed to list *v1.Secret: secrets "machine-api-operator-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.744523 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"machine-api-operator-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"machine-api-operator-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.744649 4961 reflector.go:561] object-"openshift-machine-api"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.744676 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.744804 4961 reflector.go:561] object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc": failed to list *v1.Secret: secrets "oauth-openshift-dockercfg-znhcc" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace 
"openshift-authentication": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.744834 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"oauth-openshift-dockercfg-znhcc\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"oauth-openshift-dockercfg-znhcc\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.744943 4961 reflector.go:561] object-"openshift-route-controller-manager"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-route-controller-manager": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.744975 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-route-controller-manager\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-route-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.745075 4961 reflector.go:561] object-"openshift-controller-manager"/"client-ca": failed to list *v1.ConfigMap: configmaps "client-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.745108 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"client-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"client-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.748826 4961 reflector.go:561] object-"openshift-machine-api"/"machine-api-operator-images": failed to list *v1.ConfigMap: configmaps "machine-api-operator-images" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.748895 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"machine-api-operator-images\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"machine-api-operator-images\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.749031 4961 reflector.go:561] object-"openshift-controller-manager"/"serving-cert": failed to list *v1.Secret: secrets "serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace 
"openshift-controller-manager": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.749066 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.749326 4961 reflector.go:561] object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data": failed to list *v1.Secret: secrets "v4-0-config-user-idp-0-file-data" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.749355 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-user-idp-0-file-data\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"v4-0-config-user-idp-0-file-data\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.750158 4961 reflector.go:561] object-"openshift-oauth-apiserver"/"etcd-serving-ca": failed to list *v1.ConfigMap: configmaps "etcd-serving-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-oauth-apiserver": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.750190 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-oauth-apiserver\"/\"etcd-serving-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"etcd-serving-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-oauth-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.754351 4961 reflector.go:561] object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq": failed to list *v1.Secret: secrets "oauth-apiserver-sa-dockercfg-6r2bq" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-oauth-apiserver": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.756214 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-sp65b"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.771348 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q89t5"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.771709 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s4xcd"] Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.756344 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-oauth-apiserver\"/\"oauth-apiserver-sa-dockercfg-6r2bq\": Failed to watch *v1.Secret: 
failed to list *v1.Secret: secrets \"oauth-apiserver-sa-dockercfg-6r2bq\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-oauth-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.772132 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s4xcd"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.772562 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-sp65b"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.772945 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q89t5"
Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.773949 4961 reflector.go:561] object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c": failed to list *v1.Secret: secrets "openshift-controller-manager-sa-dockercfg-msq4c" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object
Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.774007 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"openshift-controller-manager-sa-dockercfg-msq4c\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"openshift-controller-manager-sa-dockercfg-msq4c\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.774123 4961 reflector.go:561] object-"openshift-controller-manager"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object
Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.774136 4961 reflector.go:561] object-"openshift-controller-manager"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object
Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.774144 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.774169 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot
list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.774269 4961 reflector.go:561] object-"openshift-authentication"/"v4-0-config-system-service-ca": failed to list *v1.ConfigMap: configmaps "v4-0-config-system-service-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.774288 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-system-service-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"v4-0-config-system-service-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.774270 4961 reflector.go:561] object-"openshift-authentication-operator"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.774311 4961 reflector.go:561] object-"openshift-authentication-operator"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.774308 4961 reflector.go:561] object-"openshift-authentication-operator"/"authentication-operator-config": failed to list *v1.ConfigMap: configmaps "authentication-operator-config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.774331 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.774356 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"authentication-operator-config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"authentication-operator-config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.774375 4961 reflector.go:561] object-"openshift-oauth-apiserver"/"audit-1": failed to list 
*v1.ConfigMap: configmaps "audit-1" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-oauth-apiserver": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.774390 4961 reflector.go:561] object-"openshift-authentication-operator"/"serving-cert": failed to list *v1.Secret: secrets "serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.774400 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-oauth-apiserver\"/\"audit-1\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"audit-1\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-oauth-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.774406 4961 reflector.go:561] object-"openshift-authentication"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.774435 4961 reflector.go:561] object-"openshift-authentication"/"v4-0-config-user-template-error": failed to list *v1.Secret: secrets "v4-0-config-user-template-error" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.774435 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.774407 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"serving-cert\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.774456 4961 reflector.go:561] object-"openshift-authentication"/"v4-0-config-system-serving-cert": failed to list *v1.Secret: secrets "v4-0-config-system-serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.774475 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-system-serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"v4-0-config-system-serving-cert\" is forbidden: User 
\"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.774453 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-user-template-error\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"v4-0-config-user-template-error\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.774498 4961 reflector.go:561] object-"openshift-image-registry"/"trusted-ca": failed to list *v1.ConfigMap: configmaps "trusted-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-image-registry": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.774512 4961 reflector.go:561] object-"openshift-oauth-apiserver"/"etcd-client": failed to list *v1.Secret: secrets "etcd-client" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-oauth-apiserver": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.774519 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-image-registry\"/\"trusted-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"trusted-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-image-registry\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.774530 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-oauth-apiserver\"/\"etcd-client\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"etcd-client\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-oauth-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.774320 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.774614 4961 reflector.go:561] object-"openshift-oauth-apiserver"/"serving-cert": failed to list *v1.Secret: secrets "serving-cert" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-oauth-apiserver": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.774640 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-oauth-apiserver\"/\"serving-cert\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"serving-cert\" is forbidden: User \"system:node:crc\" 
cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-oauth-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.774678 4961 reflector.go:561] object-"openshift-authentication"/"v4-0-config-user-template-provider-selection": failed to list *v1.Secret: secrets "v4-0-config-user-template-provider-selection" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.774704 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-user-template-provider-selection\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"v4-0-config-user-template-provider-selection\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.774734 4961 reflector.go:561] object-"openshift-authentication"/"v4-0-config-user-template-login": failed to list *v1.Secret: secrets "v4-0-config-user-template-login" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.774753 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-user-template-login\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"v4-0-config-user-template-login\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.774912 4961 reflector.go:561] object-"openshift-controller-manager"/"openshift-global-ca": failed to list *v1.ConfigMap: configmaps "openshift-global-ca" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.774935 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"openshift-global-ca\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-global-ca\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.774996 4961 reflector.go:561] object-"openshift-oauth-apiserver"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-oauth-apiserver": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.775015 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-oauth-apiserver\"/\"openshift-service-ca.crt\": Failed to watch 
*v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-oauth-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.775060 4961 reflector.go:561] object-"openshift-oauth-apiserver"/"encryption-config-1": failed to list *v1.Secret: secrets "encryption-config-1" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-oauth-apiserver": no relationship found between node 'crc' and this object
Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.775074 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-oauth-apiserver\"/\"encryption-config-1\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"encryption-config-1\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-oauth-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.775117 4961 reflector.go:561] object-"openshift-controller-manager"/"config": failed to list *v1.ConfigMap: configmaps "config" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-controller-manager": no relationship found between node 'crc' and this object
Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.775128 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-controller-manager\"/\"config\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"config\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-controller-manager\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.775211 4961 reflector.go:561] object-"openshift-authentication"/"v4-0-config-system-router-certs": failed to list *v1.Secret: secrets "v4-0-config-system-router-certs" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object
Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.775225 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-system-router-certs\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"v4-0-config-system-router-certs\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.775270 4961 reflector.go:561] object-"openshift-machine-api"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-machine-api": no relationship found between node 'crc' and this object
Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.775282 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-machine-api\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list
*v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-machine-api\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.775328 4961 reflector.go:561] object-"openshift-authentication"/"audit": failed to list *v1.ConfigMap: configmaps "audit" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.775340 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"audit\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"audit\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.775375 4961 reflector.go:561] object-"openshift-oauth-apiserver"/"trusted-ca-bundle": failed to list *v1.ConfigMap: configmaps "trusted-ca-bundle" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-oauth-apiserver": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.775387 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-oauth-apiserver\"/\"trusted-ca-bundle\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"trusted-ca-bundle\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-oauth-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.775425 4961 reflector.go:561] object-"openshift-authentication"/"v4-0-config-system-session": failed to list *v1.Secret: secrets "v4-0-config-system-session" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.775437 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-system-session\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"v4-0-config-system-session\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.775543 4961 reflector.go:561] object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle": failed to list *v1.ConfigMap: configmaps "v4-0-config-system-trusted-ca-bundle" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.775558 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-system-trusted-ca-bundle\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: 
configmaps \"v4-0-config-system-trusted-ca-bundle\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.775732 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.775970 4961 reflector.go:561] object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template": failed to list *v1.Secret: secrets "v4-0-config-system-ocp-branding-template" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.776007 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-system-ocp-branding-template\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"v4-0-config-system-ocp-branding-template\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.776043 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-serving-cert\") pod \"route-controller-manager-6576b87f9c-hvmkj\" (UID: \"f9ea79ab-de4c-4165-82d4-84b9d73df5a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.776076 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcpz7\" (UniqueName: \"kubernetes.io/projected/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-kube-api-access-xcpz7\") pod \"controller-manager-879f6c89f-cc7mk\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.776099 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/475e25de-63ce-4cae-8fc6-4c057d616247-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-6px8t\" (UID: \"475e25de-63ce-4cae-8fc6-4c057d616247\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6px8t" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.776126 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.776171 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gdbnr\" (UniqueName: \"kubernetes.io/projected/892bdd9c-95f3-450b-9b7b-917e481f0d6f-kube-api-access-gdbnr\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: 
\"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.776192 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/475e25de-63ce-4cae-8fc6-4c057d616247-images\") pod \"machine-api-operator-5694c8668f-6px8t\" (UID: \"475e25de-63ce-4cae-8fc6-4c057d616247\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6px8t" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.776050 4961 reflector.go:561] object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx": failed to list *v1.Secret: secrets "cluster-image-registry-operator-dockercfg-m4qtx" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-image-registry": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.776314 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-image-registry\"/\"cluster-image-registry-operator-dockercfg-m4qtx\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"cluster-image-registry-operator-dockercfg-m4qtx\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-image-registry\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.776058 4961 reflector.go:561] object-"openshift-authentication"/"v4-0-config-system-cliconfig": failed to list *v1.ConfigMap: configmaps "v4-0-config-system-cliconfig" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.776337 4961 reflector.go:561] object-"openshift-authentication-operator"/"service-ca-bundle": failed to list *v1.ConfigMap: configmaps "service-ca-bundle" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.776345 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"v4-0-config-system-cliconfig\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"v4-0-config-system-cliconfig\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.776145 4961 reflector.go:561] object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj": failed to list *v1.Secret: secrets "authentication-operator-dockercfg-mz9bj" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.776356 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"service-ca-bundle\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"service-ca-bundle\" is 
forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.776371 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"authentication-operator-dockercfg-mz9bj\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"authentication-operator-dockercfg-mz9bj\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.776252 4961 reflector.go:561] object-"openshift-authentication"/"openshift-service-ca.crt": failed to list *v1.ConfigMap: configmaps "openshift-service-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.776379 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-client-ca\") pod \"controller-manager-879f6c89f-cc7mk\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.776398 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication\"/\"openshift-service-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"openshift-service-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication\": no relationship found between node 'crc' and this object" logger="UnhandledError" Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.776274 4961 reflector.go:561] object-"openshift-authentication-operator"/"trusted-ca-bundle": failed to list *v1.ConfigMap: configmaps "trusted-ca-bundle" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-authentication-operator": no relationship found between node 'crc' and this object Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.776479 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-audit-policies\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.776489 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.776514 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/892bdd9c-95f3-450b-9b7b-917e481f0d6f-audit-dir\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:33 crc kubenswrapper[4961]: 
E1205 17:35:33.776506 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-authentication-operator\"/\"trusted-ca-bundle\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"trusted-ca-bundle\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-authentication-operator\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.776268 4961 reflector.go:561] object-"openshift-oauth-apiserver"/"kube-root-ca.crt": failed to list *v1.ConfigMap: configmaps "kube-root-ca.crt" is forbidden: User "system:node:crc" cannot list resource "configmaps" in API group "" in the namespace "openshift-oauth-apiserver": no relationship found between node 'crc' and this object
Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.776561 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-oauth-apiserver\"/\"kube-root-ca.crt\": Failed to watch *v1.ConfigMap: failed to list *v1.ConfigMap: configmaps \"kube-root-ca.crt\" is forbidden: User \"system:node:crc\" cannot list resource \"configmaps\" in API group \"\" in the namespace \"openshift-oauth-apiserver\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 05 17:35:33 crc kubenswrapper[4961]: W1205 17:35:33.776330 4961 reflector.go:561] object-"openshift-image-registry"/"image-registry-operator-tls": failed to list *v1.Secret: secrets "image-registry-operator-tls" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "openshift-image-registry": no relationship found between node 'crc' and this object
Dec 05 17:35:33 crc kubenswrapper[4961]: E1205 17:35:33.776591 4961 reflector.go:158] "Unhandled Error" err="object-\"openshift-image-registry\"/\"image-registry-operator-tls\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"image-registry-operator-tls\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"openshift-image-registry\": no relationship found between node 'crc' and this object" logger="UnhandledError"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.776660 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k4gjg\" (UniqueName: \"kubernetes.io/projected/d1f2c38b-9894-4145-bb3f-a38bfaf9e16e-kube-api-access-k4gjg\") pod \"cluster-image-registry-operator-dc59b4c8b-nx5xr\" (UID: \"d1f2c38b-9894-4145-bb3f-a38bfaf9e16e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nx5xr"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.776702 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.776734 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID:
\"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.776761 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/475e25de-63ce-4cae-8fc6-4c057d616247-config\") pod \"machine-api-operator-5694c8668f-6px8t\" (UID: \"475e25de-63ce-4cae-8fc6-4c057d616247\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6px8t" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.776832 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.776886 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.776925 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/892bdd9c-95f3-450b-9b7b-917e481f0d6f-etcd-client\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.776952 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d1f2c38b-9894-4145-bb3f-a38bfaf9e16e-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-nx5xr\" (UID: \"d1f2c38b-9894-4145-bb3f-a38bfaf9e16e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nx5xr" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.776985 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-audit-policies\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.777013 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.777059 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/892bdd9c-95f3-450b-9b7b-917e481f0d6f-serving-cert\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: 
\"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.777117 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d1f2c38b-9894-4145-bb3f-a38bfaf9e16e-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-nx5xr\" (UID: \"d1f2c38b-9894-4145-bb3f-a38bfaf9e16e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nx5xr" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.777179 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbxd4\" (UniqueName: \"kubernetes.io/projected/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-kube-api-access-vbxd4\") pod \"route-controller-manager-6576b87f9c-hvmkj\" (UID: \"f9ea79ab-de4c-4165-82d4-84b9d73df5a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.777273 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.777306 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-audit-dir\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.777405 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4p96n\" (UniqueName: \"kubernetes.io/projected/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-kube-api-access-4p96n\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.777500 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-client-ca\") pod \"route-controller-manager-6576b87f9c-hvmkj\" (UID: \"f9ea79ab-de4c-4165-82d4-84b9d73df5a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.777526 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.777549 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wxxhd\" (UniqueName: \"kubernetes.io/projected/e44fec59-fa03-4dd6-be86-108902060c91-kube-api-access-wxxhd\") pod 
\"authentication-operator-69f744f599-cxwzt\" (UID: \"e44fec59-fa03-4dd6-be86-108902060c91\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.777571 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e44fec59-fa03-4dd6-be86-108902060c91-serving-cert\") pod \"authentication-operator-69f744f599-cxwzt\" (UID: \"e44fec59-fa03-4dd6-be86-108902060c91\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.777825 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.777855 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e44fec59-fa03-4dd6-be86-108902060c91-service-ca-bundle\") pod \"authentication-operator-69f744f599-cxwzt\" (UID: \"e44fec59-fa03-4dd6-be86-108902060c91\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.777882 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.777905 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-config\") pod \"route-controller-manager-6576b87f9c-hvmkj\" (UID: \"f9ea79ab-de4c-4165-82d4-84b9d73df5a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.777928 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-serving-cert\") pod \"controller-manager-879f6c89f-cc7mk\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.777956 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.778039 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: 
\"kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-cc7mk\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.778068 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e44fec59-fa03-4dd6-be86-108902060c91-config\") pod \"authentication-operator-69f744f599-cxwzt\" (UID: \"e44fec59-fa03-4dd6-be86-108902060c91\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.778094 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-config\") pod \"controller-manager-879f6c89f-cc7mk\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.778114 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/d1f2c38b-9894-4145-bb3f-a38bfaf9e16e-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-nx5xr\" (UID: \"d1f2c38b-9894-4145-bb3f-a38bfaf9e16e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nx5xr" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.778137 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.778161 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h2dzz\" (UniqueName: \"kubernetes.io/projected/475e25de-63ce-4cae-8fc6-4c057d616247-kube-api-access-h2dzz\") pod \"machine-api-operator-5694c8668f-6px8t\" (UID: \"475e25de-63ce-4cae-8fc6-4c057d616247\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6px8t" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.778185 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.778208 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/892bdd9c-95f3-450b-9b7b-917e481f0d6f-encryption-config\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.778231 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e44fec59-fa03-4dd6-be86-108902060c91-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-cxwzt\" (UID: \"e44fec59-fa03-4dd6-be86-108902060c91\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.781714 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.781967 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.782182 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.782430 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.782474 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.782438 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.782587 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.782823 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.783001 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.783161 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.783318 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.783498 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.783793 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.784983 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.785143 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.785380 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.785522 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" 
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.790006 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-7xwcd"]
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.790693 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-6pfr2"]
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.791088 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-6pfr2"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.791595 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7xwcd"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.793144 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-2msfx"]
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.793509 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ksm5f"]
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.795547 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.795797 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.796026 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.796289 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.796539 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.797071 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.797590 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.797731 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-2msfx"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.797823 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.798027 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.798295 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.798353 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.798431 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.798709 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.798911 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.798294 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-hpvmn"]
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.799057 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.799610 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-vg677"]
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.800414 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hpvmn"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.809197 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vpcxg"]
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.809351 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-vg677"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.809734 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jqd6w"]
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.809733 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.809831 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vpcxg"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.810336 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.810453 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.810518 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-8vtdg"]
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.810711 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.810845 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.810962 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.811152 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jqd6w"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.811157 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-qtkvq"]
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.811255 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-8vtdg"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.811355 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.811644 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.811817 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.812604 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-hlv6r"]
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.824363 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qtkvq"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.830333 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.830541 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.830648 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.830902 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.831080 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.831401 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.835016 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.835346 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.835808 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rt756"]
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.837140 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-6px8t"]
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.837163 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p6lkk"]
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.839296 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rt756" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.841905 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.842971 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-cc7mk"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.843000 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5x2t5"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.843352 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-nksfl"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.843754 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-cxwzt"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.843903 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-nksfl" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.843930 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-hlv6r" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.844169 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p6lkk" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.844379 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5x2t5" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.850523 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-zswqb"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.851364 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zswqb" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.853496 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-znxrq"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.854102 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-znxrq" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.856143 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5ks25"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.856928 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ks25" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.864910 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-xlpsm"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.865703 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-xlpsm" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.866155 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4z6bw"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.866797 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4z6bw" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.867569 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-c4krp"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.868042 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-c4krp" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.869095 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.870270 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-mwbff"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.871078 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mwbff" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.872954 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.875281 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5hmrk"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.876016 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5hmrk" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879010 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k4gjg\" (UniqueName: \"kubernetes.io/projected/d1f2c38b-9894-4145-bb3f-a38bfaf9e16e-kube-api-access-k4gjg\") pod \"cluster-image-registry-operator-dc59b4c8b-nx5xr\" (UID: \"d1f2c38b-9894-4145-bb3f-a38bfaf9e16e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nx5xr" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879049 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879077 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879102 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879125 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/475e25de-63ce-4cae-8fc6-4c057d616247-config\") pod \"machine-api-operator-5694c8668f-6px8t\" (UID: \"475e25de-63ce-4cae-8fc6-4c057d616247\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6px8t" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879168 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-audit-policies\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879187 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879207 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: 
\"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879228 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/892bdd9c-95f3-450b-9b7b-917e481f0d6f-etcd-client\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879250 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d1f2c38b-9894-4145-bb3f-a38bfaf9e16e-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-nx5xr\" (UID: \"d1f2c38b-9894-4145-bb3f-a38bfaf9e16e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nx5xr" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879278 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-oauth-serving-cert\") pod \"console-f9d7485db-sp65b\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") " pod="openshift-console/console-f9d7485db-sp65b" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879302 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/892bdd9c-95f3-450b-9b7b-917e481f0d6f-serving-cert\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879327 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d1f2c38b-9894-4145-bb3f-a38bfaf9e16e-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-nx5xr\" (UID: \"d1f2c38b-9894-4145-bb3f-a38bfaf9e16e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nx5xr" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879349 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbxd4\" (UniqueName: \"kubernetes.io/projected/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-kube-api-access-vbxd4\") pod \"route-controller-manager-6576b87f9c-hvmkj\" (UID: \"f9ea79ab-de4c-4165-82d4-84b9d73df5a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879373 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wr47f\" (UniqueName: \"kubernetes.io/projected/65337aa1-ef5b-4daa-8e0e-33e8ff67f85b-kube-api-access-wr47f\") pod \"etcd-operator-b45778765-6pfr2\" (UID: \"65337aa1-ef5b-4daa-8e0e-33e8ff67f85b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pfr2" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879399 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc 
kubenswrapper[4961]: I1205 17:35:33.879421 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-service-ca\") pod \"console-f9d7485db-sp65b\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") " pod="openshift-console/console-f9d7485db-sp65b" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879444 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-audit-dir\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879466 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4p96n\" (UniqueName: \"kubernetes.io/projected/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-kube-api-access-4p96n\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879487 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879511 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxxhd\" (UniqueName: \"kubernetes.io/projected/e44fec59-fa03-4dd6-be86-108902060c91-kube-api-access-wxxhd\") pod \"authentication-operator-69f744f599-cxwzt\" (UID: \"e44fec59-fa03-4dd6-be86-108902060c91\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879554 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-client-ca\") pod \"route-controller-manager-6576b87f9c-hvmkj\" (UID: \"f9ea79ab-de4c-4165-82d4-84b9d73df5a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879578 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e44fec59-fa03-4dd6-be86-108902060c91-serving-cert\") pod \"authentication-operator-69f744f599-cxwzt\" (UID: \"e44fec59-fa03-4dd6-be86-108902060c91\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879602 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879623 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879647 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e44fec59-fa03-4dd6-be86-108902060c91-service-ca-bundle\") pod \"authentication-operator-69f744f599-cxwzt\" (UID: \"e44fec59-fa03-4dd6-be86-108902060c91\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879670 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-config\") pod \"route-controller-manager-6576b87f9c-hvmkj\" (UID: \"f9ea79ab-de4c-4165-82d4-84b9d73df5a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879689 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-console-config\") pod \"console-f9d7485db-sp65b\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") " pod="openshift-console/console-f9d7485db-sp65b" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879706 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-serving-cert\") pod \"controller-manager-879f6c89f-cc7mk\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879724 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879738 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-cc7mk\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879757 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e44fec59-fa03-4dd6-be86-108902060c91-config\") pod \"authentication-operator-69f744f599-cxwzt\" (UID: \"e44fec59-fa03-4dd6-be86-108902060c91\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879790 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-config\") pod \"controller-manager-879f6c89f-cc7mk\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879809 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65337aa1-ef5b-4daa-8e0e-33e8ff67f85b-config\") pod \"etcd-operator-b45778765-6pfr2\" (UID: \"65337aa1-ef5b-4daa-8e0e-33e8ff67f85b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pfr2" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879826 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879844 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/d1f2c38b-9894-4145-bb3f-a38bfaf9e16e-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-nx5xr\" (UID: \"d1f2c38b-9894-4145-bb3f-a38bfaf9e16e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nx5xr" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879863 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879879 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/892bdd9c-95f3-450b-9b7b-917e481f0d6f-encryption-config\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879897 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e44fec59-fa03-4dd6-be86-108902060c91-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-cxwzt\" (UID: \"e44fec59-fa03-4dd6-be86-108902060c91\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879916 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h2dzz\" (UniqueName: \"kubernetes.io/projected/475e25de-63ce-4cae-8fc6-4c057d616247-kube-api-access-h2dzz\") pod \"machine-api-operator-5694c8668f-6px8t\" (UID: \"475e25de-63ce-4cae-8fc6-4c057d616247\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6px8t" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879934 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-console-oauth-config\") pod \"console-f9d7485db-sp65b\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") " pod="openshift-console/console-f9d7485db-sp65b" Dec 05 17:35:33 crc 
kubenswrapper[4961]: I1205 17:35:33.879952 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcpz7\" (UniqueName: \"kubernetes.io/projected/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-kube-api-access-xcpz7\") pod \"controller-manager-879f6c89f-cc7mk\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879967 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/475e25de-63ce-4cae-8fc6-4c057d616247-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-6px8t\" (UID: \"475e25de-63ce-4cae-8fc6-4c057d616247\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6px8t" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879983 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-console-serving-cert\") pod \"console-f9d7485db-sp65b\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") " pod="openshift-console/console-f9d7485db-sp65b" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.879998 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-serving-cert\") pod \"route-controller-manager-6576b87f9c-hvmkj\" (UID: \"f9ea79ab-de4c-4165-82d4-84b9d73df5a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.880016 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.880039 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gdbnr\" (UniqueName: \"kubernetes.io/projected/892bdd9c-95f3-450b-9b7b-917e481f0d6f-kube-api-access-gdbnr\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.880066 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-client-ca\") pod \"controller-manager-879f6c89f-cc7mk\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.880082 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/475e25de-63ce-4cae-8fc6-4c057d616247-images\") pod \"machine-api-operator-5694c8668f-6px8t\" (UID: \"475e25de-63ce-4cae-8fc6-4c057d616247\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6px8t" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.880101 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: 
\"kubernetes.io/configmap/65337aa1-ef5b-4daa-8e0e-33e8ff67f85b-etcd-ca\") pod \"etcd-operator-b45778765-6pfr2\" (UID: \"65337aa1-ef5b-4daa-8e0e-33e8ff67f85b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pfr2" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.880115 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-trusted-ca-bundle\") pod \"console-f9d7485db-sp65b\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") " pod="openshift-console/console-f9d7485db-sp65b" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.880132 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-audit-policies\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.880148 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/892bdd9c-95f3-450b-9b7b-917e481f0d6f-audit-dir\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.880165 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8hnqz\" (UniqueName: \"kubernetes.io/projected/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-kube-api-access-8hnqz\") pod \"console-f9d7485db-sp65b\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") " pod="openshift-console/console-f9d7485db-sp65b" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.880182 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65337aa1-ef5b-4daa-8e0e-33e8ff67f85b-serving-cert\") pod \"etcd-operator-b45778765-6pfr2\" (UID: \"65337aa1-ef5b-4daa-8e0e-33e8ff67f85b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pfr2" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.880200 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/65337aa1-ef5b-4daa-8e0e-33e8ff67f85b-etcd-service-ca\") pod \"etcd-operator-b45778765-6pfr2\" (UID: \"65337aa1-ef5b-4daa-8e0e-33e8ff67f85b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pfr2" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.880215 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/65337aa1-ef5b-4daa-8e0e-33e8ff67f85b-etcd-client\") pod \"etcd-operator-b45778765-6pfr2\" (UID: \"65337aa1-ef5b-4daa-8e0e-33e8ff67f85b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pfr2" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.880299 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-audit-dir\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 
17:35:33.881217 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/892bdd9c-95f3-450b-9b7b-917e481f0d6f-audit-dir\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.886322 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-74j6d"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.887242 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-74j6d" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.888752 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6fzkv"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.889637 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jj8px"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.889811 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6fzkv" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.892841 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.896346 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jj8px" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.897997 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-serving-cert\") pod \"route-controller-manager-6576b87f9c-hvmkj\" (UID: \"f9ea79ab-de4c-4165-82d4-84b9d73df5a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.902851 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-bghjx"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.906222 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415930-s2vbd"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.906382 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-bghjx" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.907241 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-f6zsw"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.907432 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-s2vbd" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.907796 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rt756"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.908212 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jqd6w"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.908397 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-f6zsw" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.908667 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-qtkvq"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.909682 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p6lkk"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.910719 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-6pfr2"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.912305 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.912333 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-2msfx"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.913299 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-sp65b"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.915749 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-zswqb"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.917482 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.918734 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.921384 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5x2t5"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.921598 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s4xcd"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.931166 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-f9gdg"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.932192 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5ks25"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.938261 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.939347 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nx5xr"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.940963 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-8vtdg"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.950721 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-mwbff"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.956961 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-znxrq"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.957478 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.963805 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ksm5f"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.966255 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4z6bw"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.970819 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.973328 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-c4krp"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.974304 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-hpvmn"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.984845 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-hlv6r"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.985652 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-xlpsm"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.986939 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5hmrk"] Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.987677 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-trusted-ca-bundle\") pod \"console-f9d7485db-sp65b\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") " pod="openshift-console/console-f9d7485db-sp65b" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.987806 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/65337aa1-ef5b-4daa-8e0e-33e8ff67f85b-etcd-ca\") pod \"etcd-operator-b45778765-6pfr2\" (UID: \"65337aa1-ef5b-4daa-8e0e-33e8ff67f85b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pfr2" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.987911 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/65337aa1-ef5b-4daa-8e0e-33e8ff67f85b-etcd-service-ca\") pod \"etcd-operator-b45778765-6pfr2\" (UID: \"65337aa1-ef5b-4daa-8e0e-33e8ff67f85b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pfr2" Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 
17:35:33.988017 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/65337aa1-ef5b-4daa-8e0e-33e8ff67f85b-etcd-client\") pod \"etcd-operator-b45778765-6pfr2\" (UID: \"65337aa1-ef5b-4daa-8e0e-33e8ff67f85b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pfr2"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.988414 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8hnqz\" (UniqueName: \"kubernetes.io/projected/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-kube-api-access-8hnqz\") pod \"console-f9d7485db-sp65b\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") " pod="openshift-console/console-f9d7485db-sp65b"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.988500 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65337aa1-ef5b-4daa-8e0e-33e8ff67f85b-serving-cert\") pod \"etcd-operator-b45778765-6pfr2\" (UID: \"65337aa1-ef5b-4daa-8e0e-33e8ff67f85b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pfr2"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.988679 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-oauth-serving-cert\") pod \"console-f9d7485db-sp65b\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") " pod="openshift-console/console-f9d7485db-sp65b"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.988820 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wr47f\" (UniqueName: \"kubernetes.io/projected/65337aa1-ef5b-4daa-8e0e-33e8ff67f85b-kube-api-access-wr47f\") pod \"etcd-operator-b45778765-6pfr2\" (UID: \"65337aa1-ef5b-4daa-8e0e-33e8ff67f85b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pfr2"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.988971 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-service-ca\") pod \"console-f9d7485db-sp65b\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") " pod="openshift-console/console-f9d7485db-sp65b"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.989143 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-console-config\") pod \"console-f9d7485db-sp65b\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") " pod="openshift-console/console-f9d7485db-sp65b"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.989269 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65337aa1-ef5b-4daa-8e0e-33e8ff67f85b-config\") pod \"etcd-operator-b45778765-6pfr2\" (UID: \"65337aa1-ef5b-4daa-8e0e-33e8ff67f85b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pfr2"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.989372 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-console-oauth-config\") pod \"console-f9d7485db-sp65b\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") " pod="openshift-console/console-f9d7485db-sp65b"
Dec 05 17:35:33 crc kubenswrapper[4961]:
I1205 17:35:33.989519 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-console-serving-cert\") pod \"console-f9d7485db-sp65b\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") " pod="openshift-console/console-f9d7485db-sp65b"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.995800 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/65337aa1-ef5b-4daa-8e0e-33e8ff67f85b-etcd-service-ca\") pod \"etcd-operator-b45778765-6pfr2\" (UID: \"65337aa1-ef5b-4daa-8e0e-33e8ff67f85b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pfr2"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.995884 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-service-ca\") pod \"console-f9d7485db-sp65b\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") " pod="openshift-console/console-f9d7485db-sp65b"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.996506 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-oauth-serving-cert\") pod \"console-f9d7485db-sp65b\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") " pod="openshift-console/console-f9d7485db-sp65b"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.996677 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-vg677"]
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.997293 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-trusted-ca-bundle\") pod \"console-f9d7485db-sp65b\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") " pod="openshift-console/console-f9d7485db-sp65b"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.997795 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/65337aa1-ef5b-4daa-8e0e-33e8ff67f85b-config\") pod \"etcd-operator-b45778765-6pfr2\" (UID: \"65337aa1-ef5b-4daa-8e0e-33e8ff67f85b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pfr2"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.998355 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-console-config\") pod \"console-f9d7485db-sp65b\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") " pod="openshift-console/console-f9d7485db-sp65b"
Dec 05 17:35:33 crc kubenswrapper[4961]: I1205 17:35:33.999622 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/65337aa1-ef5b-4daa-8e0e-33e8ff67f85b-etcd-ca\") pod \"etcd-operator-b45778765-6pfr2\" (UID: \"65337aa1-ef5b-4daa-8e0e-33e8ff67f85b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pfr2"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.004452 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/65337aa1-ef5b-4daa-8e0e-33e8ff67f85b-etcd-client\") pod \"etcd-operator-b45778765-6pfr2\" (UID: \"65337aa1-ef5b-4daa-8e0e-33e8ff67f85b\") "
pod="openshift-etcd-operator/etcd-operator-b45778765-6pfr2" Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.006354 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/65337aa1-ef5b-4daa-8e0e-33e8ff67f85b-serving-cert\") pod \"etcd-operator-b45778765-6pfr2\" (UID: \"65337aa1-ef5b-4daa-8e0e-33e8ff67f85b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pfr2" Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.007646 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-console-serving-cert\") pod \"console-f9d7485db-sp65b\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") " pod="openshift-console/console-f9d7485db-sp65b" Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.012202 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q89t5"] Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.021464 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vpcxg"] Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.025861 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jj8px"] Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.029270 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.029530 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.030129 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-console-oauth-config\") pod \"console-f9d7485db-sp65b\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") " pod="openshift-console/console-f9d7485db-sp65b" Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.037492 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.039961 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415930-s2vbd"] Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.040861 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-mzkwv"] Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.041903 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-hg4b6"] Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.042405 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-hg4b6" Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.042650 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-mzkwv" Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.042816 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-bghjx"] Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.052669 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6fzkv"] Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.054940 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.055833 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-74j6d"] Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.057802 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-hg4b6"] Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.058141 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-mzkwv"] Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.059111 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-wm6wm"] Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.060208 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-wm6wm"] Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.060345 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-wm6wm" Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.071024 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.091347 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.111294 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.138688 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.150652 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.171291 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.191304 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.211325 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.251934 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.271478 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 
17:35:34.292303 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.311651 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.333514 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.352093 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.372739 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.392070 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.412568 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.431712 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.462342 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.471508 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.493004 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.511295 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.532037 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.550959 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.572022 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.592315 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.610995 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.631938 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.651577 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205
17:35:34.671278 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.693370 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.711497 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.731720 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.753568 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.771666 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.792151 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.811031 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.851541 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.869851 4961 request.go:700] Waited for 1.015374502s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-operator-lifecycle-manager/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.871821 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.880923 4961 configmap.go:193] Couldn't get configMap openshift-oauth-apiserver/trusted-ca-bundle: failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.880981 4961 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-system-ocp-branding-template: failed to sync secret cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.880989 4961 configmap.go:193] Couldn't get configMap openshift-authentication-operator/service-ca-bundle: failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881011 4961 configmap.go:193] Couldn't get configMap openshift-route-controller-manager/config: failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881024 4961 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-system-session: failed to sync secret cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]:
E1205 17:35:34.881039 4961 secret.go:188] Couldn't get secret openshift-controller-manager/serving-cert: failed to sync secret cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.880989 4961 configmap.go:193] Couldn't get configMap openshift-machine-api/kube-rbac-proxy: failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881058 4961 configmap.go:193] Couldn't get configMap openshift-authentication/v4-0-config-system-trusted-ca-bundle: failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.880927 4961 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-system-router-certs: failed to sync secret cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881081 4961 secret.go:188] Couldn't get secret openshift-machine-api/machine-api-operator-tls: failed to sync secret cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881011 4961 configmap.go:193] Couldn't get configMap openshift-authentication/audit: failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881105 4961 secret.go:188] Couldn't get secret openshift-oauth-apiserver/etcd-client: failed to sync secret cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.880948 4961 secret.go:188] Couldn't get secret openshift-oauth-apiserver/serving-cert: failed to sync secret cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881112 4961 configmap.go:193] Couldn't get configMap openshift-authentication/v4-0-config-system-cliconfig: failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881166 4961 configmap.go:193] Couldn't get configMap openshift-authentication/v4-0-config-system-service-ca: failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881168 4961 configmap.go:193] Couldn't get configMap openshift-route-controller-manager/client-ca: failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881174 4961 secret.go:188] Couldn't get secret openshift-oauth-apiserver/encryption-config-1: failed to sync secret cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881183 4961 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-system-serving-cert: failed to sync secret cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881228 4961 configmap.go:193] Couldn't get configMap openshift-controller-manager/config: failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881228 4961 secret.go:188] Couldn't get secret openshift-image-registry/image-registry-operator-tls: failed to sync secret cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881248 4961 configmap.go:193] Couldn't get configMap openshift-authentication-operator/authentication-operator-config: failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205
17:35:34.880961 4961 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-user-template-provider-selection: failed to sync secret cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881271 4961 configmap.go:193] Couldn't get configMap openshift-controller-manager/client-ca: failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.880962 4961 configmap.go:193] Couldn't get configMap openshift-image-registry/trusted-ca: failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881292 4961 configmap.go:193] Couldn't get configMap openshift-machine-api/machine-api-operator-images: failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881207 4961 configmap.go:193] Couldn't get configMap openshift-authentication-operator/trusted-ca-bundle: failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.880968 4961 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-user-idp-0-file-data: failed to sync secret cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.880942 4961 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-user-template-error: failed to sync secret cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881068 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-trusted-ca-bundle podName:892bdd9c-95f3-450b-9b7b-917e481f0d6f nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.381012585 +0000 UTC m=+141.442163068 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "trusted-ca-bundle" (UniqueName: "kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-trusted-ca-bundle") pod "apiserver-7bbb656c7d-ld8sp" (UID: "892bdd9c-95f3-450b-9b7b-917e481f0d6f") : failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881374 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-ocp-branding-template podName:dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.381360993 +0000 UTC m=+141.442511476 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-system-ocp-branding-template" (UniqueName: "kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-ocp-branding-template") pod "oauth-openshift-558db77b4-f9gdg" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6") : failed to sync secret cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881389 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e44fec59-fa03-4dd6-be86-108902060c91-service-ca-bundle podName:e44fec59-fa03-4dd6-be86-108902060c91 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.381381094 +0000 UTC m=+141.442531567 (durationBeforeRetry 500ms).
Error: MountVolume.SetUp failed for volume "service-ca-bundle" (UniqueName: "kubernetes.io/configmap/e44fec59-fa03-4dd6-be86-108902060c91-service-ca-bundle") pod "authentication-operator-69f744f599-cxwzt" (UID: "e44fec59-fa03-4dd6-be86-108902060c91") : failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881399 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-config podName:f9ea79ab-de4c-4165-82d4-84b9d73df5a4 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.381394244 +0000 UTC m=+141.442544717 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-config") pod "route-controller-manager-6576b87f9c-hvmkj" (UID: "f9ea79ab-de4c-4165-82d4-84b9d73df5a4") : failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881407 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-session podName:dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.381403554 +0000 UTC m=+141.442554027 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-system-session" (UniqueName: "kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-session") pod "oauth-openshift-558db77b4-f9gdg" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6") : failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881416 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-serving-cert podName:20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.381412674 +0000 UTC m=+141.442563147 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-serving-cert") pod "controller-manager-879f6c89f-cc7mk" (UID: "20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb") : failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881108 4961 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-user-template-login: failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881147 4961 secret.go:188] Couldn't get secret openshift-authentication-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881211 4961 configmap.go:193] Couldn't get configMap openshift-controller-manager/openshift-global-ca: failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881263 4961 configmap.go:193] Couldn't get configMap openshift-oauth-apiserver/audit-1: failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881425 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/475e25de-63ce-4cae-8fc6-4c057d616247-config podName:475e25de-63ce-4cae-8fc6-4c057d616247 nodeName:}" failed. 
No retries permitted until 2025-12-05 17:35:35.381421005 +0000 UTC m=+141.442571478 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/475e25de-63ce-4cae-8fc6-4c057d616247-config") pod "machine-api-operator-5694c8668f-6px8t" (UID: "475e25de-63ce-4cae-8fc6-4c057d616247") : failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881600 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-router-certs podName:dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.381584948 +0000 UTC m=+141.442735431 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-system-router-certs" (UniqueName: "kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-router-certs") pod "oauth-openshift-558db77b4-f9gdg" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6") : failed to sync secret cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881644 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-trusted-ca-bundle podName:dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.381631919 +0000 UTC m=+141.442782412 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-system-trusted-ca-bundle" (UniqueName: "kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-trusted-ca-bundle") pod "oauth-openshift-558db77b4-f9gdg" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6") : failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881144 4961 configmap.go:193] Couldn't get configMap openshift-oauth-apiserver/etcd-serving-ca: failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881662 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/475e25de-63ce-4cae-8fc6-4c057d616247-machine-api-operator-tls podName:475e25de-63ce-4cae-8fc6-4c057d616247 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.3816543 +0000 UTC m=+141.442804793 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "machine-api-operator-tls" (UniqueName: "kubernetes.io/secret/475e25de-63ce-4cae-8fc6-4c057d616247-machine-api-operator-tls") pod "machine-api-operator-5694c8668f-6px8t" (UID: "475e25de-63ce-4cae-8fc6-4c057d616247") : failed to sync secret cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881677 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-audit-policies podName:dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.38166918 +0000 UTC m=+141.442819673 (durationBeforeRetry 500ms).
Error: MountVolume.SetUp failed for volume "audit-policies" (UniqueName: "kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-audit-policies") pod "oauth-openshift-558db77b4-f9gdg" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6") : failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881713 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/892bdd9c-95f3-450b-9b7b-917e481f0d6f-etcd-client podName:892bdd9c-95f3-450b-9b7b-917e481f0d6f nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.381684131 +0000 UTC m=+141.442834624 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etcd-client" (UniqueName: "kubernetes.io/secret/892bdd9c-95f3-450b-9b7b-917e481f0d6f-etcd-client") pod "apiserver-7bbb656c7d-ld8sp" (UID: "892bdd9c-95f3-450b-9b7b-917e481f0d6f") : failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881728 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/892bdd9c-95f3-450b-9b7b-917e481f0d6f-serving-cert podName:892bdd9c-95f3-450b-9b7b-917e481f0d6f nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.381720601 +0000 UTC m=+141.442871084 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/892bdd9c-95f3-450b-9b7b-917e481f0d6f-serving-cert") pod "apiserver-7bbb656c7d-ld8sp" (UID: "892bdd9c-95f3-450b-9b7b-917e481f0d6f") : failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881743 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-service-ca podName:dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.381735872 +0000 UTC m=+141.442886355 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-system-service-ca" (UniqueName: "kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-service-ca") pod "oauth-openshift-558db77b4-f9gdg" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6") : failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881759 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-client-ca podName:f9ea79ab-de4c-4165-82d4-84b9d73df5a4 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.381750962 +0000 UTC m=+141.442901455 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "client-ca" (UniqueName: "kubernetes.io/configmap/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-client-ca") pod "route-controller-manager-6576b87f9c-hvmkj" (UID: "f9ea79ab-de4c-4165-82d4-84b9d73df5a4") : failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881806 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/892bdd9c-95f3-450b-9b7b-917e481f0d6f-encryption-config podName:892bdd9c-95f3-450b-9b7b-917e481f0d6f nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.381768573 +0000 UTC m=+141.442919066 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "encryption-config" (UniqueName: "kubernetes.io/secret/892bdd9c-95f3-450b-9b7b-917e481f0d6f-encryption-config") pod "apiserver-7bbb656c7d-ld8sp" (UID: "892bdd9c-95f3-450b-9b7b-917e481f0d6f") : failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881836 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-config podName:20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.381826774 +0000 UTC m=+141.442977267 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-config") pod "controller-manager-879f6c89f-cc7mk" (UID: "20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb") : failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881859 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/d1f2c38b-9894-4145-bb3f-a38bfaf9e16e-image-registry-operator-tls podName:d1f2c38b-9894-4145-bb3f-a38bfaf9e16e nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.381851224 +0000 UTC m=+141.443001717 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "image-registry-operator-tls" (UniqueName: "kubernetes.io/secret/d1f2c38b-9894-4145-bb3f-a38bfaf9e16e-image-registry-operator-tls") pod "cluster-image-registry-operator-dc59b4c8b-nx5xr" (UID: "d1f2c38b-9894-4145-bb3f-a38bfaf9e16e") : failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881911 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-serving-cert podName:dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.381898185 +0000 UTC m=+141.443048678 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-system-serving-cert" (UniqueName: "kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-serving-cert") pod "oauth-openshift-558db77b4-f9gdg" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6") : failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881938 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-provider-selection podName:dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.381927876 +0000 UTC m=+141.443078359 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-user-template-provider-selection" (UniqueName: "kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-provider-selection") pod "oauth-openshift-558db77b4-f9gdg" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6") : failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.881995 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-client-ca podName:20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb nodeName:}" failed. 
No retries permitted until 2025-12-05 17:35:35.381985227 +0000 UTC m=+141.443135710 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "client-ca" (UniqueName: "kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-client-ca") pod "controller-manager-879f6c89f-cc7mk" (UID: "20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb") : failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.882017 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e44fec59-fa03-4dd6-be86-108902060c91-config podName:e44fec59-fa03-4dd6-be86-108902060c91 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.382008058 +0000 UTC m=+141.443158541 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/e44fec59-fa03-4dd6-be86-108902060c91-config") pod "authentication-operator-69f744f599-cxwzt" (UID: "e44fec59-fa03-4dd6-be86-108902060c91") : failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.882034 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d1f2c38b-9894-4145-bb3f-a38bfaf9e16e-trusted-ca podName:d1f2c38b-9894-4145-bb3f-a38bfaf9e16e nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.382026128 +0000 UTC m=+141.443176611 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "trusted-ca" (UniqueName: "kubernetes.io/configmap/d1f2c38b-9894-4145-bb3f-a38bfaf9e16e-trusted-ca") pod "cluster-image-registry-operator-dc59b4c8b-nx5xr" (UID: "d1f2c38b-9894-4145-bb3f-a38bfaf9e16e") : failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.882093 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/475e25de-63ce-4cae-8fc6-4c057d616247-images podName:475e25de-63ce-4cae-8fc6-4c057d616247 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.38208184 +0000 UTC m=+141.443232343 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "images" (UniqueName: "kubernetes.io/configmap/475e25de-63ce-4cae-8fc6-4c057d616247-images") pod "machine-api-operator-5694c8668f-6px8t" (UID: "475e25de-63ce-4cae-8fc6-4c057d616247") : failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.882121 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-cliconfig podName:dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.38211182 +0000 UTC m=+141.443262313 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-system-cliconfig" (UniqueName: "kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-cliconfig") pod "oauth-openshift-558db77b4-f9gdg" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6") : failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.882178 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e44fec59-fa03-4dd6-be86-108902060c91-trusted-ca-bundle podName:e44fec59-fa03-4dd6-be86-108902060c91 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.382167552 +0000 UTC m=+141.443318035 (durationBeforeRetry 500ms).
Error: MountVolume.SetUp failed for volume "trusted-ca-bundle" (UniqueName: "kubernetes.io/configmap/e44fec59-fa03-4dd6-be86-108902060c91-trusted-ca-bundle") pod "authentication-operator-69f744f599-cxwzt" (UID: "e44fec59-fa03-4dd6-be86-108902060c91") : failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.882197 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-idp-0-file-data podName:dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.382188452 +0000 UTC m=+141.443338945 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-user-idp-0-file-data" (UniqueName: "kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-idp-0-file-data") pod "oauth-openshift-558db77b4-f9gdg" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6") : failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.882253 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-error podName:dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.382212533 +0000 UTC m=+141.443363026 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-user-template-error" (UniqueName: "kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-error") pod "oauth-openshift-558db77b4-f9gdg" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6") : failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.882284 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-login podName:dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.382273314 +0000 UTC m=+141.443423817 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "v4-0-config-user-template-login" (UniqueName: "kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-login") pod "oauth-openshift-558db77b4-f9gdg" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6") : failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.882309 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e44fec59-fa03-4dd6-be86-108902060c91-serving-cert podName:e44fec59-fa03-4dd6-be86-108902060c91 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.382300056 +0000 UTC m=+141.443450539 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/e44fec59-fa03-4dd6-be86-108902060c91-serving-cert") pod "authentication-operator-69f744f599-cxwzt" (UID: "e44fec59-fa03-4dd6-be86-108902060c91") : failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.882369 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-proxy-ca-bundles podName:20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb nodeName:}" failed. 
No retries permitted until 2025-12-05 17:35:35.382358287 +0000 UTC m=+141.443508780 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "proxy-ca-bundles" (UniqueName: "kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-proxy-ca-bundles") pod "controller-manager-879f6c89f-cc7mk" (UID: "20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb") : failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.882397 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-audit-policies podName:892bdd9c-95f3-450b-9b7b-917e481f0d6f nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.382387358 +0000 UTC m=+141.443537841 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "audit-policies" (UniqueName: "kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-audit-policies") pod "apiserver-7bbb656c7d-ld8sp" (UID: "892bdd9c-95f3-450b-9b7b-917e481f0d6f") : failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: E1205 17:35:34.882459 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-etcd-serving-ca podName:892bdd9c-95f3-450b-9b7b-917e481f0d6f nodeName:}" failed. No retries permitted until 2025-12-05 17:35:35.382449099 +0000 UTC m=+141.443599592 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "etcd-serving-ca" (UniqueName: "kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-etcd-serving-ca") pod "apiserver-7bbb656c7d-ld8sp" (UID: "892bdd9c-95f3-450b-9b7b-917e481f0d6f") : failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.892495 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.911449 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.931478 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.952463 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.971156 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx"
Dec 05 17:35:34 crc kubenswrapper[4961]: I1205 17:35:34.990874 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf"
Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.011328 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.030974 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert"
Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.051122 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Dec 05
17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.072750 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.091743 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.118516 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.131997 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.152088 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.172990 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.191667 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.212616 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.231313 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.270565 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k4gjg\" (UniqueName: \"kubernetes.io/projected/d1f2c38b-9894-4145-bb3f-a38bfaf9e16e-kube-api-access-k4gjg\") pod \"cluster-image-registry-operator-dc59b4c8b-nx5xr\" (UID: \"d1f2c38b-9894-4145-bb3f-a38bfaf9e16e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nx5xr" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.318580 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/d1f2c38b-9894-4145-bb3f-a38bfaf9e16e-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-nx5xr\" (UID: \"d1f2c38b-9894-4145-bb3f-a38bfaf9e16e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nx5xr" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.409030 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-client-ca\") pod \"route-controller-manager-6576b87f9c-hvmkj\" (UID: \"f9ea79ab-de4c-4165-82d4-84b9d73df5a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.409094 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.409143 4961 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e44fec59-fa03-4dd6-be86-108902060c91-serving-cert\") pod \"authentication-operator-69f744f599-cxwzt\" (UID: \"e44fec59-fa03-4dd6-be86-108902060c91\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.409189 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.409259 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e44fec59-fa03-4dd6-be86-108902060c91-service-ca-bundle\") pod \"authentication-operator-69f744f599-cxwzt\" (UID: \"e44fec59-fa03-4dd6-be86-108902060c91\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.409297 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.409365 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-config\") pod \"route-controller-manager-6576b87f9c-hvmkj\" (UID: \"f9ea79ab-de4c-4165-82d4-84b9d73df5a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.409435 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-serving-cert\") pod \"controller-manager-879f6c89f-cc7mk\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.409504 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.409542 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-cc7mk\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.409582 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/e44fec59-fa03-4dd6-be86-108902060c91-config\") pod \"authentication-operator-69f744f599-cxwzt\" (UID: \"e44fec59-fa03-4dd6-be86-108902060c91\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.409621 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-config\") pod \"controller-manager-879f6c89f-cc7mk\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.409658 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/d1f2c38b-9894-4145-bb3f-a38bfaf9e16e-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-nx5xr\" (UID: \"d1f2c38b-9894-4145-bb3f-a38bfaf9e16e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nx5xr" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.409841 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.409900 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.409927 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/892bdd9c-95f3-450b-9b7b-917e481f0d6f-encryption-config\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.409953 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e44fec59-fa03-4dd6-be86-108902060c91-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-cxwzt\" (UID: \"e44fec59-fa03-4dd6-be86-108902060c91\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.409988 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/475e25de-63ce-4cae-8fc6-4c057d616247-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-6px8t\" (UID: \"475e25de-63ce-4cae-8fc6-4c057d616247\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6px8t" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.410017 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: 
\"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.410071 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/475e25de-63ce-4cae-8fc6-4c057d616247-images\") pod \"machine-api-operator-5694c8668f-6px8t\" (UID: \"475e25de-63ce-4cae-8fc6-4c057d616247\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6px8t" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.410104 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-client-ca\") pod \"controller-manager-879f6c89f-cc7mk\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.410129 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-audit-policies\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.410178 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.410203 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.410226 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/475e25de-63ce-4cae-8fc6-4c057d616247-config\") pod \"machine-api-operator-5694c8668f-6px8t\" (UID: \"475e25de-63ce-4cae-8fc6-4c057d616247\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6px8t" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.410255 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.410291 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-provider-selection\") pod 
\"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.410316 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/892bdd9c-95f3-450b-9b7b-917e481f0d6f-etcd-client\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.410340 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d1f2c38b-9894-4145-bb3f-a38bfaf9e16e-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-nx5xr\" (UID: \"d1f2c38b-9894-4145-bb3f-a38bfaf9e16e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nx5xr" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.410368 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-audit-policies\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.410421 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.410460 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/892bdd9c-95f3-450b-9b7b-917e481f0d6f-serving-cert\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.410524 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.410720 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.432587 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.451004 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.478226 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.491559 4961 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.513557 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.532029 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.552946 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.571990 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.591501 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.612727 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.631085 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.651842 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.671756 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.693510 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.712377 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.731482 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.765433 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wr47f\" (UniqueName: \"kubernetes.io/projected/65337aa1-ef5b-4daa-8e0e-33e8ff67f85b-kube-api-access-wr47f\") pod \"etcd-operator-b45778765-6pfr2\" (UID: \"65337aa1-ef5b-4daa-8e0e-33e8ff67f85b\") " pod="openshift-etcd-operator/etcd-operator-b45778765-6pfr2" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.786562 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8hnqz\" (UniqueName: \"kubernetes.io/projected/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-kube-api-access-8hnqz\") pod \"console-f9d7485db-sp65b\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") " pod="openshift-console/console-f9d7485db-sp65b" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.792174 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.811683 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 05 17:35:35 crc 
kubenswrapper[4961]: I1205 17:35:35.831882 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.853755 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.871945 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.889514 4961 request.go:700] Waited for 1.84658212s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-dns/secrets?fieldSelector=metadata.name%3Ddns-default-metrics-tls&limit=500&resourceVersion=0 Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.891880 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.912086 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.931872 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.952276 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 05 17:35:35 crc kubenswrapper[4961]: I1205 17:35:35.971818 4961 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.004442 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-sp65b" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.010993 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.018338 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-6pfr2" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.021220 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-client-ca\") pod \"controller-manager-879f6c89f-cc7mk\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.031275 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.050506 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.063325 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/d1f2c38b-9894-4145-bb3f-a38bfaf9e16e-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-nx5xr\" (UID: \"d1f2c38b-9894-4145-bb3f-a38bfaf9e16e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nx5xr" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.072167 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.085228 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/892bdd9c-95f3-450b-9b7b-917e481f0d6f-serving-cert\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.091883 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.113673 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.133621 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.152228 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.162717 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/475e25de-63ce-4cae-8fc6-4c057d616247-images\") pod \"machine-api-operator-5694c8668f-6px8t\" (UID: \"475e25de-63ce-4cae-8fc6-4c057d616247\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6px8t" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.172670 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.193011 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.206236 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/475e25de-63ce-4cae-8fc6-4c057d616247-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-6px8t\" (UID: \"475e25de-63ce-4cae-8fc6-4c057d616247\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6px8t" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.212605 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.232192 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.240309 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e44fec59-fa03-4dd6-be86-108902060c91-config\") pod \"authentication-operator-69f744f599-cxwzt\" (UID: \"e44fec59-fa03-4dd6-be86-108902060c91\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.256947 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.261655 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-cc7mk\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.273042 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.280126 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e44fec59-fa03-4dd6-be86-108902060c91-service-ca-bundle\") pod \"authentication-operator-69f744f599-cxwzt\" (UID: \"e44fec59-fa03-4dd6-be86-108902060c91\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.287363 4961 projected.go:288] Couldn't get configMap openshift-authentication/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.291718 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.311808 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.322953 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/892bdd9c-95f3-450b-9b7b-917e481f0d6f-encryption-config\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.327282 4961 projected.go:288] Couldn't get configMap openshift-route-controller-manager/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the 
condition Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.332133 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.337214 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gdbnr\" (UniqueName: \"kubernetes.io/projected/892bdd9c-95f3-450b-9b7b-917e481f0d6f-kube-api-access-gdbnr\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.349582 4961 projected.go:288] Couldn't get configMap openshift-authentication-operator/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.349625 4961 projected.go:194] Error preparing data for projected volume kube-api-access-wxxhd for pod openshift-authentication-operator/authentication-operator-69f744f599-cxwzt: failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.349678 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e44fec59-fa03-4dd6-be86-108902060c91-kube-api-access-wxxhd podName:e44fec59-fa03-4dd6-be86-108902060c91 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:36.849661315 +0000 UTC m=+142.910811788 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-wxxhd" (UniqueName: "kubernetes.io/projected/e44fec59-fa03-4dd6-be86-108902060c91-kube-api-access-wxxhd") pod "authentication-operator-69f744f599-cxwzt" (UID: "e44fec59-fa03-4dd6-be86-108902060c91") : failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.352704 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.387803 4961 projected.go:288] Couldn't get configMap openshift-controller-manager/kube-root-ca.crt: failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.387875 4961 projected.go:194] Error preparing data for projected volume kube-api-access-xcpz7 for pod openshift-controller-manager/controller-manager-879f6c89f-cc7mk: failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.387949 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-kube-api-access-xcpz7 podName:20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb nodeName:}" failed. No retries permitted until 2025-12-05 17:35:36.887929583 +0000 UTC m=+142.949080056 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-xcpz7" (UniqueName: "kubernetes.io/projected/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-kube-api-access-xcpz7") pod "controller-manager-879f6c89f-cc7mk" (UID: "20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb") : failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.396263 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.403700 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.409308 4961 configmap.go:193] Couldn't get configMap openshift-route-controller-manager/client-ca: failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.409445 4961 configmap.go:193] Couldn't get configMap openshift-oauth-apiserver/etcd-serving-ca: failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.409486 4961 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-system-router-certs: failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.409501 4961 configmap.go:193] Couldn't get configMap openshift-authentication/v4-0-config-system-service-ca: failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.409551 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-etcd-serving-ca podName:892bdd9c-95f3-450b-9b7b-917e481f0d6f nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.409517275 +0000 UTC m=+143.470667788 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etcd-serving-ca" (UniqueName: "kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-etcd-serving-ca") pod "apiserver-7bbb656c7d-ld8sp" (UID: "892bdd9c-95f3-450b-9b7b-917e481f0d6f") : failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.409578 4961 configmap.go:193] Couldn't get configMap openshift-route-controller-manager/config: failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.409600 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-router-certs podName:dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.409571016 +0000 UTC m=+143.470721579 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "v4-0-config-system-router-certs" (UniqueName: "kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-router-certs") pod "oauth-openshift-558db77b4-f9gdg" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6") : failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.409650 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-service-ca podName:dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.409638237 +0000 UTC m=+143.470788800 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "v4-0-config-system-service-ca" (UniqueName: "kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-service-ca") pod "oauth-openshift-558db77b4-f9gdg" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6") : failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.409672 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-config podName:f9ea79ab-de4c-4165-82d4-84b9d73df5a4 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.409662578 +0000 UTC m=+143.470813161 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-config") pod "route-controller-manager-6576b87f9c-hvmkj" (UID: "f9ea79ab-de4c-4165-82d4-84b9d73df5a4") : failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.409351 4961 secret.go:188] Couldn't get secret openshift-authentication-operator/serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.409718 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/e44fec59-fa03-4dd6-be86-108902060c91-serving-cert podName:e44fec59-fa03-4dd6-be86-108902060c91 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.409703529 +0000 UTC m=+143.470854102 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/e44fec59-fa03-4dd6-be86-108902060c91-serving-cert") pod "authentication-operator-69f744f599-cxwzt" (UID: "e44fec59-fa03-4dd6-be86-108902060c91") : failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.409735 4961 configmap.go:193] Couldn't get configMap openshift-oauth-apiserver/trusted-ca-bundle: failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.409748 4961 secret.go:188] Couldn't get secret openshift-controller-manager/serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.409807 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-trusted-ca-bundle podName:892bdd9c-95f3-450b-9b7b-917e481f0d6f nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.40976482 +0000 UTC m=+143.470915423 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "trusted-ca-bundle" (UniqueName: "kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-trusted-ca-bundle") pod "apiserver-7bbb656c7d-ld8sp" (UID: "892bdd9c-95f3-450b-9b7b-917e481f0d6f") : failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.409830 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-serving-cert podName:20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.409820512 +0000 UTC m=+143.470971105 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "serving-cert" (UniqueName: "kubernetes.io/secret/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-serving-cert") pod "controller-manager-879f6c89f-cc7mk" (UID: "20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb") : failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.409848 4961 configmap.go:193] Couldn't get configMap openshift-controller-manager/config: failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.409884 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-config podName:20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.409873114 +0000 UTC m=+143.471023687 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-config") pod "controller-manager-879f6c89f-cc7mk" (UID: "20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb") : failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.410123 4961 configmap.go:193] Couldn't get configMap openshift-authentication-operator/trusted-ca-bundle: failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.410154 4961 configmap.go:193] Couldn't get configMap openshift-authentication/v4-0-config-system-cliconfig: failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.410374 4961 configmap.go:193] Couldn't get configMap openshift-machine-api/kube-rbac-proxy: failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.410458 4961 configmap.go:193] Couldn't get configMap openshift-image-registry/trusted-ca: failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.410471 4961 configmap.go:193] Couldn't get configMap openshift-authentication/audit: failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.410490 4961 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-system-serving-cert: failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.410397 4961 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-system-ocp-branding-template: failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.410501 4961 secret.go:188] Couldn't get secret 
openshift-authentication/v4-0-config-user-idp-0-file-data: failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.410518 4961 configmap.go:193] Couldn't get configMap openshift-oauth-apiserver/audit-1: failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.410533 4961 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-system-session: failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.410416 4961 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-user-template-provider-selection: failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.410435 4961 configmap.go:193] Couldn't get configMap openshift-authentication/v4-0-config-system-trusted-ca-bundle: failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.410465 4961 secret.go:188] Couldn't get secret openshift-oauth-apiserver/etcd-client: failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.410642 4961 secret.go:188] Couldn't get secret openshift-authentication/v4-0-config-user-template-error: failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.410965 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-client-ca podName:f9ea79ab-de4c-4165-82d4-84b9d73df5a4 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.410948178 +0000 UTC m=+143.472098731 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "client-ca" (UniqueName: "kubernetes.io/configmap/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-client-ca") pod "route-controller-manager-6576b87f9c-hvmkj" (UID: "f9ea79ab-de4c-4165-82d4-84b9d73df5a4") : failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.411111 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/e44fec59-fa03-4dd6-be86-108902060c91-trusted-ca-bundle podName:e44fec59-fa03-4dd6-be86-108902060c91 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.411099192 +0000 UTC m=+143.472249795 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "trusted-ca-bundle" (UniqueName: "kubernetes.io/configmap/e44fec59-fa03-4dd6-be86-108902060c91-trusted-ca-bundle") pod "authentication-operator-69f744f599-cxwzt" (UID: "e44fec59-fa03-4dd6-be86-108902060c91") : failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.411224 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-cliconfig podName:dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.411214175 +0000 UTC m=+143.472364738 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "v4-0-config-system-cliconfig" (UniqueName: "kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-cliconfig") pod "oauth-openshift-558db77b4-f9gdg" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6") : failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.411377 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/475e25de-63ce-4cae-8fc6-4c057d616247-config podName:475e25de-63ce-4cae-8fc6-4c057d616247 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.411310168 +0000 UTC m=+143.472460641 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config" (UniqueName: "kubernetes.io/configmap/475e25de-63ce-4cae-8fc6-4c057d616247-config") pod "machine-api-operator-5694c8668f-6px8t" (UID: "475e25de-63ce-4cae-8fc6-4c057d616247") : failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.411482 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/d1f2c38b-9894-4145-bb3f-a38bfaf9e16e-trusted-ca podName:d1f2c38b-9894-4145-bb3f-a38bfaf9e16e nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.411472672 +0000 UTC m=+143.472623145 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "trusted-ca" (UniqueName: "kubernetes.io/configmap/d1f2c38b-9894-4145-bb3f-a38bfaf9e16e-trusted-ca") pod "cluster-image-registry-operator-dc59b4c8b-nx5xr" (UID: "d1f2c38b-9894-4145-bb3f-a38bfaf9e16e") : failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.411608 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-audit-policies podName:dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.411594394 +0000 UTC m=+143.472744927 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "audit-policies" (UniqueName: "kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-audit-policies") pod "oauth-openshift-558db77b4-f9gdg" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6") : failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.411732 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-serving-cert podName:dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.411719237 +0000 UTC m=+143.472869710 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "v4-0-config-system-serving-cert" (UniqueName: "kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-serving-cert") pod "oauth-openshift-558db77b4-f9gdg" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6") : failed to sync secret cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.411835 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-ocp-branding-template podName:dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.4118217 +0000 UTC m=+143.472972253 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "v4-0-config-system-ocp-branding-template" (UniqueName: "kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-ocp-branding-template") pod "oauth-openshift-558db77b4-f9gdg" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6") : failed to sync secret cache: timed out waiting for the condition
Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.411932 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-audit-policies podName:892bdd9c-95f3-450b-9b7b-917e481f0d6f nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.411922412 +0000 UTC m=+143.473072965 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "audit-policies" (UniqueName: "kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-audit-policies") pod "apiserver-7bbb656c7d-ld8sp" (UID: "892bdd9c-95f3-450b-9b7b-917e481f0d6f") : failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.412025 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-idp-0-file-data podName:dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.412014194 +0000 UTC m=+143.473164667 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "v4-0-config-user-idp-0-file-data" (UniqueName: "kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-idp-0-file-data") pod "oauth-openshift-558db77b4-f9gdg" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6") : failed to sync secret cache: timed out waiting for the condition
Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.412105 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-session podName:dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.412095526 +0000 UTC m=+143.473246109 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "v4-0-config-system-session" (UniqueName: "kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-session") pod "oauth-openshift-558db77b4-f9gdg" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6") : failed to sync secret cache: timed out waiting for the condition
Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.412182 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-provider-selection podName:dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.412172908 +0000 UTC m=+143.473323391 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "v4-0-config-user-template-provider-selection" (UniqueName: "kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-provider-selection") pod "oauth-openshift-558db77b4-f9gdg" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6") : failed to sync secret cache: timed out waiting for the condition
Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.412264 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-trusted-ca-bundle podName:dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.412253729 +0000 UTC m=+143.473404202 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "v4-0-config-system-trusted-ca-bundle" (UniqueName: "kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-trusted-ca-bundle") pod "oauth-openshift-558db77b4-f9gdg" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6") : failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.412373 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/892bdd9c-95f3-450b-9b7b-917e481f0d6f-etcd-client podName:892bdd9c-95f3-450b-9b7b-917e481f0d6f nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.412362952 +0000 UTC m=+143.473513505 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etcd-client" (UniqueName: "kubernetes.io/secret/892bdd9c-95f3-450b-9b7b-917e481f0d6f-etcd-client") pod "apiserver-7bbb656c7d-ld8sp" (UID: "892bdd9c-95f3-450b-9b7b-917e481f0d6f") : failed to sync secret cache: timed out waiting for the condition
Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.412482 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-error podName:dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.412472535 +0000 UTC m=+143.473623008 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "v4-0-config-user-template-error" (UniqueName: "kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-error") pod "oauth-openshift-558db77b4-f9gdg" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6") : failed to sync secret cache: timed out waiting for the condition
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.706069 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.716476 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-ca-trust-extracted\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.716866 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.716953 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.718614 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.718566 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.718643 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-bound-sa-token\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.718957 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e42cbe9a-2cfd-4cf7-b43f-6ee4458995a5-serving-cert\") pod \"openshift-config-operator-7777fb866f-qtkvq\" (UID: \"e42cbe9a-2cfd-4cf7-b43f-6ee4458995a5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qtkvq"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.719193 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-registry-tls\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.719267 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-registry-certificates\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.719418 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-trusted-ca\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.719556 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.719579 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.719762 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/e42cbe9a-2cfd-4cf7-b43f-6ee4458995a5-available-featuregates\") pod \"openshift-config-operator-7777fb866f-qtkvq\" (UID: \"e42cbe9a-2cfd-4cf7-b43f-6ee4458995a5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qtkvq"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.720301 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.721622 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tbv5x\" (UniqueName: \"kubernetes.io/projected/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-kube-api-access-tbv5x\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.720504 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.725138 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-installation-pull-secrets\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.725393 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.725426 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zqx4\" (UniqueName: \"kubernetes.io/projected/e42cbe9a-2cfd-4cf7-b43f-6ee4458995a5-kube-api-access-8zqx4\") pod \"openshift-config-operator-7777fb866f-qtkvq\" (UID: \"e42cbe9a-2cfd-4cf7-b43f-6ee4458995a5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qtkvq"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.725937 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca"
Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.725965 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.225938585 +0000 UTC m=+143.287089158 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.726463 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.726479 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.727005 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.731394 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.732653 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.737464 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.738968 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.752297 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.752648 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-sp65b"]
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.754021 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-6pfr2"]
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.771297 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.791384 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.811464 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.827762 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.828010 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-image-import-ca\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.828388 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pk74j\" (UniqueName: \"kubernetes.io/projected/694a4dea-466c-48de-b828-dded0b4e3309-kube-api-access-pk74j\") pod \"downloads-7954f5f757-2msfx\" (UID: \"694a4dea-466c-48de-b828-dded0b4e3309\") " pod="openshift-console/downloads-7954f5f757-2msfx"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.828426 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75749479-6797-4377-8ce6-5c329f7e7a78-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-jqd6w\" (UID: \"75749479-6797-4377-8ce6-5c329f7e7a78\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jqd6w"
Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.828657 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.328530101 +0000 UTC m=+143.389680574 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.828873 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bfd6bf8b-a642-4717-a3e5-5e972e49e665-auth-proxy-config\") pod \"machine-approver-56656f9798-7xwcd\" (UID: \"bfd6bf8b-a642-4717-a3e5-5e972e49e665\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7xwcd"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.828969 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltk9l\" (UniqueName: \"kubernetes.io/projected/4a0cedd3-c46f-462a-8cd9-54629e7cb576-kube-api-access-ltk9l\") pod \"cluster-samples-operator-665b6dd947-vpcxg\" (UID: \"4a0cedd3-c46f-462a-8cd9-54629e7cb576\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vpcxg"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.829113 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9-socket-dir\") pod \"csi-hostpathplugin-wm6wm\" (UID: \"7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6wm"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.829379 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c203f16b-553f-4282-92bb-6b7af8dfc31d-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-q89t5\" (UID: \"c203f16b-553f-4282-92bb-6b7af8dfc31d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q89t5"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.829413 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9-registration-dir\") pod \"csi-hostpathplugin-wm6wm\" (UID: \"7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6wm"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.829588 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75749479-6797-4377-8ce6-5c329f7e7a78-config\") pod \"kube-apiserver-operator-766d6c64bb-jqd6w\" (UID: \"75749479-6797-4377-8ce6-5c329f7e7a78\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jqd6w"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.829683 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/4a0cedd3-c46f-462a-8cd9-54629e7cb576-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-vpcxg\" (UID: \"4a0cedd3-c46f-462a-8cd9-54629e7cb576\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vpcxg"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.829938 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/d9ae9f2f-9154-404d-9dfa-40cc81cb1e8d-tmpfs\") pod \"packageserver-d55dfcdfc-jj8px\" (UID: \"d9ae9f2f-9154-404d-9dfa-40cc81cb1e8d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jj8px"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.830029 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zxdz\" (UniqueName: \"kubernetes.io/projected/1c4ea533-79da-4be4-a70c-6de0d3b6c120-kube-api-access-8zxdz\") pod \"openshift-apiserver-operator-796bbdcf4f-s4xcd\" (UID: \"1c4ea533-79da-4be4-a70c-6de0d3b6c120\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s4xcd"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.830099 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6md2g\" (UniqueName: \"kubernetes.io/projected/17b393b4-5451-4ef1-9486-2b1b6a70a1c3-kube-api-access-6md2g\") pod \"machine-config-operator-74547568cd-zswqb\" (UID: \"17b393b4-5451-4ef1-9486-2b1b6a70a1c3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zswqb"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.830135 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9-plugins-dir\") pod \"csi-hostpathplugin-wm6wm\" (UID: \"7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6wm"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.830264 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-serving-cert\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.830340 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-bound-sa-token\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.830382 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jbhk6\" (UniqueName: \"kubernetes.io/projected/3733b15a-cd45-418e-9452-79a33535ce35-kube-api-access-jbhk6\") pod \"marketplace-operator-79b997595-c4krp\" (UID: \"3733b15a-cd45-418e-9452-79a33535ce35\") " pod="openshift-marketplace/marketplace-operator-79b997595-c4krp"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.830505 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5dcdb8e2-ea42-41ca-815d-058d299f96bd-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-p6lkk\" (UID: \"5dcdb8e2-ea42-41ca-815d-058d299f96bd\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p6lkk"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.830545 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/901195d1-b2c7-4ddb-831c-7f5efca5c758-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-5x2t5\" (UID: \"901195d1-b2c7-4ddb-831c-7f5efca5c758\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5x2t5"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.830611 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3733b15a-cd45-418e-9452-79a33535ce35-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-c4krp\" (UID: \"3733b15a-cd45-418e-9452-79a33535ce35\") " pod="openshift-marketplace/marketplace-operator-79b997595-c4krp"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.830686 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e42cbe9a-2cfd-4cf7-b43f-6ee4458995a5-serving-cert\") pod \"openshift-config-operator-7777fb866f-qtkvq\" (UID: \"e42cbe9a-2cfd-4cf7-b43f-6ee4458995a5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qtkvq"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.830758 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qw6tb\" (UniqueName: \"kubernetes.io/projected/37e0f3d5-9ee5-4aac-aaef-205d8466b84a-kube-api-access-qw6tb\") pod \"ingress-canary-hg4b6\" (UID: \"37e0f3d5-9ee5-4aac-aaef-205d8466b84a\") " pod="openshift-ingress-canary/ingress-canary-hg4b6"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.830826 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/df860690-ba32-4a90-ab42-80ae129b935a-metrics-tls\") pod \"dns-default-mzkwv\" (UID: \"df860690-ba32-4a90-ab42-80ae129b935a\") " pod="openshift-dns/dns-default-mzkwv"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.830893 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-registry-tls\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.830924 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-trusted-ca\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.830987 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-etcd-client\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.831023 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/75749479-6797-4377-8ce6-5c329f7e7a78-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-jqd6w\" (UID: \"75749479-6797-4377-8ce6-5c329f7e7a78\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jqd6w"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.831100 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c7ede11-6cf9-43d8-8165-7d5ce10f9adc-config\") pod \"service-ca-operator-777779d784-74j6d\" (UID: \"6c7ede11-6cf9-43d8-8165-7d5ce10f9adc\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-74j6d"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.831159 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-26kqr\" (UniqueName: \"kubernetes.io/projected/c203f16b-553f-4282-92bb-6b7af8dfc31d-kube-api-access-26kqr\") pod \"openshift-controller-manager-operator-756b6f6bc6-q89t5\" (UID: \"c203f16b-553f-4282-92bb-6b7af8dfc31d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q89t5"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.831215 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/e42cbe9a-2cfd-4cf7-b43f-6ee4458995a5-available-featuregates\") pod \"openshift-config-operator-7777fb866f-qtkvq\" (UID: \"e42cbe9a-2cfd-4cf7-b43f-6ee4458995a5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qtkvq"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.831251 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/47ebd001-fdad-4dcf-a6ed-6e32320cca9b-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rt756\" (UID: \"47ebd001-fdad-4dcf-a6ed-6e32320cca9b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rt756"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.831308 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/114cc896-bf47-4c02-b478-3bb3b3aaa729-metrics-tls\") pod \"ingress-operator-5b745b69d9-hpvmn\" (UID: \"114cc896-bf47-4c02-b478-3bb3b3aaa729\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hpvmn"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.831356 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/84d294ab-bfe4-4ec2-ba8a-6a5540d570b8-service-ca-bundle\") pod \"router-default-5444994796-nksfl\" (UID: \"84d294ab-bfe4-4ec2-ba8a-6a5540d570b8\") " pod="openshift-ingress/router-default-5444994796-nksfl"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.831564 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.832093 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/e42cbe9a-2cfd-4cf7-b43f-6ee4458995a5-available-featuregates\") pod \"openshift-config-operator-7777fb866f-qtkvq\" (UID: \"e42cbe9a-2cfd-4cf7-b43f-6ee4458995a5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qtkvq"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.832206 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfd6bf8b-a642-4717-a3e5-5e972e49e665-config\") pod \"machine-approver-56656f9798-7xwcd\" (UID: \"bfd6bf8b-a642-4717-a3e5-5e972e49e665\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7xwcd"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.832233 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/17b393b4-5451-4ef1-9486-2b1b6a70a1c3-proxy-tls\") pod \"machine-config-operator-74547568cd-zswqb\" (UID: \"17b393b4-5451-4ef1-9486-2b1b6a70a1c3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zswqb"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.832293 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f02aa74-e797-4fe5-8412-0a64e64730c1-config\") pod \"console-operator-58897d9998-hlv6r\" (UID: \"9f02aa74-e797-4fe5-8412-0a64e64730c1\") " pod="openshift-console-operator/console-operator-58897d9998-hlv6r"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.832757 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dd7g\" (UniqueName: \"kubernetes.io/projected/7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9-kube-api-access-8dd7g\") pod \"csi-hostpathplugin-wm6wm\" (UID: \"7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6wm"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.832857 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/af42c194-0d59-4fbb-ab5c-777172b46fd8-metrics-tls\") pod \"dns-operator-744455d44c-vg677\" (UID: \"af42c194-0d59-4fbb-ab5c-777172b46fd8\") " pod="openshift-dns-operator/dns-operator-744455d44c-vg677"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.832925 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d9ae9f2f-9154-404d-9dfa-40cc81cb1e8d-apiservice-cert\") pod \"packageserver-d55dfcdfc-jj8px\" (UID: \"d9ae9f2f-9154-404d-9dfa-40cc81cb1e8d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jj8px"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.832946 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9-csi-data-dir\") pod \"csi-hostpathplugin-wm6wm\" (UID: \"7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6wm"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.832964 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/84d294ab-bfe4-4ec2-ba8a-6a5540d570b8-default-certificate\") pod \"router-default-5444994796-nksfl\" (UID: \"84d294ab-bfe4-4ec2-ba8a-6a5540d570b8\") " pod="openshift-ingress/router-default-5444994796-nksfl"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.833084 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ws8s4\" (UniqueName: \"kubernetes.io/projected/faedd27a-ff89-4838-a73a-51e1c194f2e9-kube-api-access-ws8s4\") pod \"catalog-operator-68c6474976-4z6bw\" (UID: \"faedd27a-ff89-4838-a73a-51e1c194f2e9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4z6bw"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.833115 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-installation-pull-secrets\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.833161 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-encryption-config\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.833180 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/17b393b4-5451-4ef1-9486-2b1b6a70a1c3-auth-proxy-config\") pod \"machine-config-operator-74547568cd-zswqb\" (UID: \"17b393b4-5451-4ef1-9486-2b1b6a70a1c3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zswqb"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.833201 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gsjnz\" (UniqueName: \"kubernetes.io/projected/d9ae9f2f-9154-404d-9dfa-40cc81cb1e8d-kube-api-access-gsjnz\") pod \"packageserver-d55dfcdfc-jj8px\" (UID: \"d9ae9f2f-9154-404d-9dfa-40cc81cb1e8d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jj8px"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.833210 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-trusted-ca\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.833384 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5dcdb8e2-ea42-41ca-815d-058d299f96bd-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-p6lkk\" (UID: \"5dcdb8e2-ea42-41ca-815d-058d299f96bd\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p6lkk"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.833423 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-etcd-serving-ca\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.833474 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9f02aa74-e797-4fe5-8412-0a64e64730c1-serving-cert\") pod \"console-operator-58897d9998-hlv6r\" (UID: \"9f02aa74-e797-4fe5-8412-0a64e64730c1\") " pod="openshift-console-operator/console-operator-58897d9998-hlv6r"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.833500 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fhbwx\" (UniqueName: \"kubernetes.io/projected/9f02aa74-e797-4fe5-8412-0a64e64730c1-kube-api-access-fhbwx\") pod \"console-operator-58897d9998-hlv6r\" (UID: \"9f02aa74-e797-4fe5-8412-0a64e64730c1\") " pod="openshift-console-operator/console-operator-58897d9998-hlv6r"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.833561 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zvc9m\" (UniqueName: \"kubernetes.io/projected/84d294ab-bfe4-4ec2-ba8a-6a5540d570b8-kube-api-access-zvc9m\") pod \"router-default-5444994796-nksfl\" (UID: \"84d294ab-bfe4-4ec2-ba8a-6a5540d570b8\") " pod="openshift-ingress/router-default-5444994796-nksfl"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.833725 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.834065 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zqx4\" (UniqueName: \"kubernetes.io/projected/e42cbe9a-2cfd-4cf7-b43f-6ee4458995a5-kube-api-access-8zqx4\") pod \"openshift-config-operator-7777fb866f-qtkvq\" (UID: \"e42cbe9a-2cfd-4cf7-b43f-6ee4458995a5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qtkvq"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.834152 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6c7ede11-6cf9-43d8-8165-7d5ce10f9adc-serving-cert\") pod \"service-ca-operator-777779d784-74j6d\" (UID: \"6c7ede11-6cf9-43d8-8165-7d5ce10f9adc\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-74j6d"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.834205 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/84d294ab-bfe4-4ec2-ba8a-6a5540d570b8-stats-auth\") pod \"router-default-5444994796-nksfl\" (UID: \"84d294ab-bfe4-4ec2-ba8a-6a5540d570b8\") " pod="openshift-ingress/router-default-5444994796-nksfl"
Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.834240 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.334211506 +0000 UTC m=+143.395361979 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.834314 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gs5nt\" (UniqueName: \"kubernetes.io/projected/df860690-ba32-4a90-ab42-80ae129b935a-kube-api-access-gs5nt\") pod \"dns-default-mzkwv\" (UID: \"df860690-ba32-4a90-ab42-80ae129b935a\") " pod="openshift-dns/dns-default-mzkwv"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.834360 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-trusted-ca-bundle\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.834450 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9-mountpoint-dir\") pod \"csi-hostpathplugin-wm6wm\" (UID: \"7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6wm"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.834514 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/901195d1-b2c7-4ddb-831c-7f5efca5c758-config\") pod \"kube-controller-manager-operator-78b949d7b-5x2t5\" (UID: \"901195d1-b2c7-4ddb-831c-7f5efca5c758\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5x2t5"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.834534 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-smx5z\" (UniqueName: \"kubernetes.io/projected/bfd6bf8b-a642-4717-a3e5-5e972e49e665-kube-api-access-smx5z\") pod \"machine-approver-56656f9798-7xwcd\" (UID: \"bfd6bf8b-a642-4717-a3e5-5e972e49e665\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7xwcd"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.834551 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/47ebd001-fdad-4dcf-a6ed-6e32320cca9b-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rt756\" (UID: \"47ebd001-fdad-4dcf-a6ed-6e32320cca9b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rt756"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.834570 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/faedd27a-ff89-4838-a73a-51e1c194f2e9-profile-collector-cert\") pod \"catalog-operator-68c6474976-4z6bw\" (UID: \"faedd27a-ff89-4838-a73a-51e1c194f2e9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4z6bw"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.834587 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5xjf\" (UniqueName: \"kubernetes.io/projected/6c7ede11-6cf9-43d8-8165-7d5ce10f9adc-kube-api-access-j5xjf\") pod \"service-ca-operator-777779d784-74j6d\" (UID: \"6c7ede11-6cf9-43d8-8165-7d5ce10f9adc\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-74j6d"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.834627 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-ca-trust-extracted\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.834645 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c203f16b-553f-4282-92bb-6b7af8dfc31d-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-q89t5\" (UID: \"c203f16b-553f-4282-92bb-6b7af8dfc31d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q89t5"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.834671 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9f02aa74-e797-4fe5-8412-0a64e64730c1-trusted-ca\") pod \"console-operator-58897d9998-hlv6r\" (UID: \"9f02aa74-e797-4fe5-8412-0a64e64730c1\") " pod="openshift-console-operator/console-operator-58897d9998-hlv6r"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.834688 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/37e0f3d5-9ee5-4aac-aaef-205d8466b84a-cert\") pod \"ingress-canary-hg4b6\" (UID: \"37e0f3d5-9ee5-4aac-aaef-205d8466b84a\") " pod="openshift-ingress-canary/ingress-canary-hg4b6"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.834708 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-74k5k\" (UniqueName: \"kubernetes.io/projected/5dcdb8e2-ea42-41ca-815d-058d299f96bd-kube-api-access-74k5k\") pod \"kube-storage-version-migrator-operator-b67b599dd-p6lkk\" (UID: \"5dcdb8e2-ea42-41ca-815d-058d299f96bd\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p6lkk"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.834731 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-audit\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.834784 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g78rm\" (UniqueName: \"kubernetes.io/projected/af42c194-0d59-4fbb-ab5c-777172b46fd8-kube-api-access-g78rm\") pod \"dns-operator-744455d44c-vg677\" (UID: \"af42c194-0d59-4fbb-ab5c-777172b46fd8\") " pod="openshift-dns-operator/dns-operator-744455d44c-vg677"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.834811 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/45c41860-47e0-4fc6-b9e2-73308ab35bfe-secret-volume\") pod \"collect-profiles-29415930-s2vbd\" (UID: \"45c41860-47e0-4fc6-b9e2-73308ab35bfe\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-s2vbd"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.834853 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/114cc896-bf47-4c02-b478-3bb3b3aaa729-bound-sa-token\") pod \"ingress-operator-5b745b69d9-hpvmn\" (UID: \"114cc896-bf47-4c02-b478-3bb3b3aaa729\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hpvmn"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.834879 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/45c41860-47e0-4fc6-b9e2-73308ab35bfe-config-volume\") pod \"collect-profiles-29415930-s2vbd\" (UID: \"45c41860-47e0-4fc6-b9e2-73308ab35bfe\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-s2vbd"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.834900 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-registry-certificates\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.834926 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/17b393b4-5451-4ef1-9486-2b1b6a70a1c3-images\") pod \"machine-config-operator-74547568cd-zswqb\" (UID: \"17b393b4-5451-4ef1-9486-2b1b6a70a1c3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zswqb"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.834942 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7vr9\" (UniqueName: \"kubernetes.io/projected/45c41860-47e0-4fc6-b9e2-73308ab35bfe-kube-api-access-j7vr9\") pod \"collect-profiles-29415930-s2vbd\" (UID: \"45c41860-47e0-4fc6-b9e2-73308ab35bfe\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-s2vbd"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.834967 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmmnp\" (UniqueName: \"kubernetes.io/projected/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-kube-api-access-mmmnp\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.834999 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/84d294ab-bfe4-4ec2-ba8a-6a5540d570b8-metrics-certs\") pod \"router-default-5444994796-nksfl\" (UID: \"84d294ab-bfe4-4ec2-ba8a-6a5540d570b8\") " pod="openshift-ingress/router-default-5444994796-nksfl"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.835018 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-config\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.835035 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j229g\" (UniqueName: \"kubernetes.io/projected/114cc896-bf47-4c02-b478-3bb3b3aaa729-kube-api-access-j229g\") pod \"ingress-operator-5b745b69d9-hpvmn\" (UID: \"114cc896-bf47-4c02-b478-3bb3b3aaa729\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hpvmn"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.835067 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/bfd6bf8b-a642-4717-a3e5-5e972e49e665-machine-approver-tls\") pod \"machine-approver-56656f9798-7xwcd\" (UID: \"bfd6bf8b-a642-4717-a3e5-5e972e49e665\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7xwcd"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.835091 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-audit-dir\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.835111 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tbv5x\" (UniqueName: \"kubernetes.io/projected/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-kube-api-access-tbv5x\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.835155 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47ebd001-fdad-4dcf-a6ed-6e32320cca9b-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rt756\" (UID: \"47ebd001-fdad-4dcf-a6ed-6e32320cca9b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rt756"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.835176 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c4ea533-79da-4be4-a70c-6de0d3b6c120-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-s4xcd\" (UID: \"1c4ea533-79da-4be4-a70c-6de0d3b6c120\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s4xcd"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.835194 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3733b15a-cd45-418e-9452-79a33535ce35-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-c4krp\" (UID: \"3733b15a-cd45-418e-9452-79a33535ce35\") " pod="openshift-marketplace/marketplace-operator-79b997595-c4krp"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.835226 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/114cc896-bf47-4c02-b478-3bb3b3aaa729-trusted-ca\") pod \"ingress-operator-5b745b69d9-hpvmn\" (UID: \"114cc896-bf47-4c02-b478-3bb3b3aaa729\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hpvmn"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.835242 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d9ae9f2f-9154-404d-9dfa-40cc81cb1e8d-webhook-cert\") pod \"packageserver-d55dfcdfc-jj8px\" (UID: \"d9ae9f2f-9154-404d-9dfa-40cc81cb1e8d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jj8px"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.835258 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/faedd27a-ff89-4838-a73a-51e1c194f2e9-srv-cert\") pod \"catalog-operator-68c6474976-4z6bw\" (UID: \"faedd27a-ff89-4838-a73a-51e1c194f2e9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4z6bw"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.835273 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c4ea533-79da-4be4-a70c-6de0d3b6c120-config\") pod \"openshift-apiserver-operator-796bbdcf4f-s4xcd\" (UID: \"1c4ea533-79da-4be4-a70c-6de0d3b6c120\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s4xcd"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.835288 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/df860690-ba32-4a90-ab42-80ae129b935a-config-volume\") pod \"dns-default-mzkwv\" (UID: \"df860690-ba32-4a90-ab42-80ae129b935a\") " pod="openshift-dns/dns-default-mzkwv"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.835326 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/901195d1-b2c7-4ddb-831c-7f5efca5c758-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-5x2t5\" (UID: \"901195d1-b2c7-4ddb-831c-7f5efca5c758\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5x2t5"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.835341 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-node-pullsecrets\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.836947 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-ca-trust-extracted\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.838227 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-registry-certificates\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.838802 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e42cbe9a-2cfd-4cf7-b43f-6ee4458995a5-serving-cert\") pod \"openshift-config-operator-7777fb866f-qtkvq\" (UID: \"e42cbe9a-2cfd-4cf7-b43f-6ee4458995a5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qtkvq"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.838883 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-registry-tls\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.839349 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-installation-pull-secrets\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.851256 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.871807 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.890938 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.894131 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-6pfr2" event={"ID":"65337aa1-ef5b-4daa-8e0e-33e8ff67f85b","Type":"ContainerStarted","Data":"229d6430e2cd4d9fc0acb3ba43d02d24d68dfd3bc373adf6efe480a269430089"}
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.895291 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-sp65b" event={"ID":"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee","Type":"ContainerStarted","Data":"2d2029e80d8596f43285e2dc5669455097f1d3efa1f2cebe2650534078b240d7"}
Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.897789 4961 projected.go:194] Error preparing data for projected volume kube-api-access-vbxd4 for pod openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj: failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.897861 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-kube-api-access-vbxd4 podName:f9ea79ab-de4c-4165-82d4-84b9d73df5a4 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.397842185 +0000 UTC m=+143.458992658 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-vbxd4" (UniqueName: "kubernetes.io/projected/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-kube-api-access-vbxd4") pod "route-controller-manager-6576b87f9c-hvmkj" (UID: "f9ea79ab-de4c-4165-82d4-84b9d73df5a4") : failed to sync configmap cache: timed out waiting for the condition
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.910188 4961 request.go:700] Waited for 1.615187308s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/configmaps?fieldSelector=metadata.name%3Daudit&limit=500&resourceVersion=0
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.912686 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.932007 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.936138 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.936246 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dd7g\" (UniqueName: \"kubernetes.io/projected/7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9-kube-api-access-8dd7g\") pod \"csi-hostpathplugin-wm6wm\" (UID: \"7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6wm"
Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.936303 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.436283517 +0000 UTC m=+143.497433990 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.936334 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9-csi-data-dir\") pod \"csi-hostpathplugin-wm6wm\" (UID: \"7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6wm"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.936360 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bzrhq\" (UniqueName: \"kubernetes.io/projected/e0c0d8d7-7517-40e5-a9d4-d1c5160d64b7-kube-api-access-bzrhq\") pod \"package-server-manager-789f6589d5-6fzkv\" (UID: \"e0c0d8d7-7517-40e5-a9d4-d1c5160d64b7\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6fzkv"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.936386 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/af42c194-0d59-4fbb-ab5c-777172b46fd8-metrics-tls\") pod \"dns-operator-744455d44c-vg677\" (UID: \"af42c194-0d59-4fbb-ab5c-777172b46fd8\") " pod="openshift-dns-operator/dns-operator-744455d44c-vg677"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.936404 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d9ae9f2f-9154-404d-9dfa-40cc81cb1e8d-apiservice-cert\") pod \"packageserver-d55dfcdfc-jj8px\" (UID: \"d9ae9f2f-9154-404d-9dfa-40cc81cb1e8d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jj8px"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.936447 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/84d294ab-bfe4-4ec2-ba8a-6a5540d570b8-default-certificate\") pod \"router-default-5444994796-nksfl\" (UID: \"84d294ab-bfe4-4ec2-ba8a-6a5540d570b8\") " pod="openshift-ingress/router-default-5444994796-nksfl"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.936476 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/17b393b4-5451-4ef1-9486-2b1b6a70a1c3-auth-proxy-config\") pod \"machine-config-operator-74547568cd-zswqb\" (UID: \"17b393b4-5451-4ef1-9486-2b1b6a70a1c3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zswqb"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.936502 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gsjnz\" (UniqueName: \"kubernetes.io/projected/d9ae9f2f-9154-404d-9dfa-40cc81cb1e8d-kube-api-access-gsjnz\") pod \"packageserver-d55dfcdfc-jj8px\" (UID: \"d9ae9f2f-9154-404d-9dfa-40cc81cb1e8d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jj8px"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.936528 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ws8s4\" (UniqueName: \"kubernetes.io/projected/faedd27a-ff89-4838-a73a-51e1c194f2e9-kube-api-access-ws8s4\") pod \"catalog-operator-68c6474976-4z6bw\" (UID: \"faedd27a-ff89-4838-a73a-51e1c194f2e9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4z6bw"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.936553 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-encryption-config\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.936578 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5dcdb8e2-ea42-41ca-815d-058d299f96bd-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-p6lkk\" (UID: \"5dcdb8e2-ea42-41ca-815d-058d299f96bd\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p6lkk"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.936600 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-etcd-serving-ca\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.936618 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9-csi-data-dir\") pod \"csi-hostpathplugin-wm6wm\" (UID: \"7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6wm"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.936633 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9f02aa74-e797-4fe5-8412-0a64e64730c1-serving-cert\") pod \"console-operator-58897d9998-hlv6r\" (UID: \"9f02aa74-e797-4fe5-8412-0a64e64730c1\") " pod="openshift-console-operator/console-operator-58897d9998-hlv6r"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.936929 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fhbwx\" (UniqueName: \"kubernetes.io/projected/9f02aa74-e797-4fe5-8412-0a64e64730c1-kube-api-access-fhbwx\") pod \"console-operator-58897d9998-hlv6r\" (UID: \"9f02aa74-e797-4fe5-8412-0a64e64730c1\") " pod="openshift-console-operator/console-operator-58897d9998-hlv6r"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.936964 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zvc9m\" (UniqueName: \"kubernetes.io/projected/84d294ab-bfe4-4ec2-ba8a-6a5540d570b8-kube-api-access-zvc9m\") pod \"router-default-5444994796-nksfl\" (UID: \"84d294ab-bfe4-4ec2-ba8a-6a5540d570b8\") " pod="openshift-ingress/router-default-5444994796-nksfl"
Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.936998 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6c7ede11-6cf9-43d8-8165-7d5ce10f9adc-serving-cert\") pod \"service-ca-operator-777779d784-74j6d\" (UID: \"6c7ede11-6cf9-43d8-8165-7d5ce10f9adc\") "
pod="openshift-service-ca-operator/service-ca-operator-777779d784-74j6d" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937029 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937097 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/601ba962-a964-472d-b481-4946fd7265b1-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-5hmrk\" (UID: \"601ba962-a964-472d-b481-4946fd7265b1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5hmrk" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937117 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/f9f9b3cf-6636-42ce-a11f-d85b976c33a5-signing-cabundle\") pod \"service-ca-9c57cc56f-bghjx\" (UID: \"f9f9b3cf-6636-42ce-a11f-d85b976c33a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-bghjx" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937143 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gs5nt\" (UniqueName: \"kubernetes.io/projected/df860690-ba32-4a90-ab42-80ae129b935a-kube-api-access-gs5nt\") pod \"dns-default-mzkwv\" (UID: \"df860690-ba32-4a90-ab42-80ae129b935a\") " pod="openshift-dns/dns-default-mzkwv" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937167 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/84d294ab-bfe4-4ec2-ba8a-6a5540d570b8-stats-auth\") pod \"router-default-5444994796-nksfl\" (UID: \"84d294ab-bfe4-4ec2-ba8a-6a5540d570b8\") " pod="openshift-ingress/router-default-5444994796-nksfl" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937247 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-trusted-ca-bundle\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937293 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v27k8\" (UniqueName: \"kubernetes.io/projected/02fabe40-bb13-4cec-af0f-0c08d46f511d-kube-api-access-v27k8\") pod \"olm-operator-6b444d44fb-znxrq\" (UID: \"02fabe40-bb13-4cec-af0f-0c08d46f511d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-znxrq" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937339 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9-mountpoint-dir\") pod \"csi-hostpathplugin-wm6wm\" (UID: \"7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6wm" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937364 4961 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2af64acf-53d5-4288-b53f-e889631a7ee4-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-xlpsm\" (UID: \"2af64acf-53d5-4288-b53f-e889631a7ee4\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xlpsm" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937425 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-smx5z\" (UniqueName: \"kubernetes.io/projected/bfd6bf8b-a642-4717-a3e5-5e972e49e665-kube-api-access-smx5z\") pod \"machine-approver-56656f9798-7xwcd\" (UID: \"bfd6bf8b-a642-4717-a3e5-5e972e49e665\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7xwcd" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937451 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/47ebd001-fdad-4dcf-a6ed-6e32320cca9b-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rt756\" (UID: \"47ebd001-fdad-4dcf-a6ed-6e32320cca9b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rt756" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937479 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/901195d1-b2c7-4ddb-831c-7f5efca5c758-config\") pod \"kube-controller-manager-operator-78b949d7b-5x2t5\" (UID: \"901195d1-b2c7-4ddb-831c-7f5efca5c758\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5x2t5" Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.937504 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.437488336 +0000 UTC m=+143.498638919 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937529 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/faedd27a-ff89-4838-a73a-51e1c194f2e9-profile-collector-cert\") pod \"catalog-operator-68c6474976-4z6bw\" (UID: \"faedd27a-ff89-4838-a73a-51e1c194f2e9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4z6bw" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937561 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c203f16b-553f-4282-92bb-6b7af8dfc31d-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-q89t5\" (UID: \"c203f16b-553f-4282-92bb-6b7af8dfc31d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q89t5" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937586 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5xjf\" (UniqueName: \"kubernetes.io/projected/6c7ede11-6cf9-43d8-8165-7d5ce10f9adc-kube-api-access-j5xjf\") pod \"service-ca-operator-777779d784-74j6d\" (UID: \"6c7ede11-6cf9-43d8-8165-7d5ce10f9adc\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-74j6d" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937612 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxxzs\" (UniqueName: \"kubernetes.io/projected/3d4d981b-d365-4cc1-ac62-0a4237be8bdf-kube-api-access-nxxzs\") pod \"migrator-59844c95c7-mwbff\" (UID: \"3d4d981b-d365-4cc1-ac62-0a4237be8bdf\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mwbff" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937656 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9f02aa74-e797-4fe5-8412-0a64e64730c1-trusted-ca\") pod \"console-operator-58897d9998-hlv6r\" (UID: \"9f02aa74-e797-4fe5-8412-0a64e64730c1\") " pod="openshift-console-operator/console-operator-58897d9998-hlv6r" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937683 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/37e0f3d5-9ee5-4aac-aaef-205d8466b84a-cert\") pod \"ingress-canary-hg4b6\" (UID: \"37e0f3d5-9ee5-4aac-aaef-205d8466b84a\") " pod="openshift-ingress-canary/ingress-canary-hg4b6" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937709 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-74k5k\" (UniqueName: \"kubernetes.io/projected/5dcdb8e2-ea42-41ca-815d-058d299f96bd-kube-api-access-74k5k\") pod \"kube-storage-version-migrator-operator-b67b599dd-p6lkk\" (UID: \"5dcdb8e2-ea42-41ca-815d-058d299f96bd\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p6lkk" Dec 05 17:35:36 crc kubenswrapper[4961]: 
I1205 17:35:36.937729 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-audit\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937761 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/02fabe40-bb13-4cec-af0f-0c08d46f511d-profile-collector-cert\") pod \"olm-operator-6b444d44fb-znxrq\" (UID: \"02fabe40-bb13-4cec-af0f-0c08d46f511d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-znxrq" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937812 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wxxhd\" (UniqueName: \"kubernetes.io/projected/e44fec59-fa03-4dd6-be86-108902060c91-kube-api-access-wxxhd\") pod \"authentication-operator-69f744f599-cxwzt\" (UID: \"e44fec59-fa03-4dd6-be86-108902060c91\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937836 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g78rm\" (UniqueName: \"kubernetes.io/projected/af42c194-0d59-4fbb-ab5c-777172b46fd8-kube-api-access-g78rm\") pod \"dns-operator-744455d44c-vg677\" (UID: \"af42c194-0d59-4fbb-ab5c-777172b46fd8\") " pod="openshift-dns-operator/dns-operator-744455d44c-vg677" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937862 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/45c41860-47e0-4fc6-b9e2-73308ab35bfe-secret-volume\") pod \"collect-profiles-29415930-s2vbd\" (UID: \"45c41860-47e0-4fc6-b9e2-73308ab35bfe\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-s2vbd" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937902 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwwg8\" (UniqueName: \"kubernetes.io/projected/601ba962-a964-472d-b481-4946fd7265b1-kube-api-access-vwwg8\") pod \"control-plane-machine-set-operator-78cbb6b69f-5hmrk\" (UID: \"601ba962-a964-472d-b481-4946fd7265b1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5hmrk" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937927 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/114cc896-bf47-4c02-b478-3bb3b3aaa729-bound-sa-token\") pod \"ingress-operator-5b745b69d9-hpvmn\" (UID: \"114cc896-bf47-4c02-b478-3bb3b3aaa729\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hpvmn" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937964 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/45c41860-47e0-4fc6-b9e2-73308ab35bfe-config-volume\") pod \"collect-profiles-29415930-s2vbd\" (UID: \"45c41860-47e0-4fc6-b9e2-73308ab35bfe\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-s2vbd" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.937992 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-j7vr9\" (UniqueName: \"kubernetes.io/projected/45c41860-47e0-4fc6-b9e2-73308ab35bfe-kube-api-access-j7vr9\") pod \"collect-profiles-29415930-s2vbd\" (UID: \"45c41860-47e0-4fc6-b9e2-73308ab35bfe\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-s2vbd" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.938026 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/17b393b4-5451-4ef1-9486-2b1b6a70a1c3-images\") pod \"machine-config-operator-74547568cd-zswqb\" (UID: \"17b393b4-5451-4ef1-9486-2b1b6a70a1c3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zswqb" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.938050 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmmnp\" (UniqueName: \"kubernetes.io/projected/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-kube-api-access-mmmnp\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.938085 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/84d294ab-bfe4-4ec2-ba8a-6a5540d570b8-metrics-certs\") pod \"router-default-5444994796-nksfl\" (UID: \"84d294ab-bfe4-4ec2-ba8a-6a5540d570b8\") " pod="openshift-ingress/router-default-5444994796-nksfl" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.938113 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z9htm\" (UniqueName: \"kubernetes.io/projected/2af64acf-53d5-4288-b53f-e889631a7ee4-kube-api-access-z9htm\") pod \"multus-admission-controller-857f4d67dd-xlpsm\" (UID: \"2af64acf-53d5-4288-b53f-e889631a7ee4\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xlpsm" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.938139 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2aa4ad5f-8dcb-4761-a31b-66a26fbc3be3-proxy-tls\") pod \"machine-config-controller-84d6567774-5ks25\" (UID: \"2aa4ad5f-8dcb-4761-a31b-66a26fbc3be3\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ks25" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.938176 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/bfd6bf8b-a642-4717-a3e5-5e972e49e665-machine-approver-tls\") pod \"machine-approver-56656f9798-7xwcd\" (UID: \"bfd6bf8b-a642-4717-a3e5-5e972e49e665\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7xwcd" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.938201 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-config\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.938225 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j229g\" (UniqueName: \"kubernetes.io/projected/114cc896-bf47-4c02-b478-3bb3b3aaa729-kube-api-access-j229g\") pod \"ingress-operator-5b745b69d9-hpvmn\" 
(UID: \"114cc896-bf47-4c02-b478-3bb3b3aaa729\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hpvmn" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.938272 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/901195d1-b2c7-4ddb-831c-7f5efca5c758-config\") pod \"kube-controller-manager-operator-78b949d7b-5x2t5\" (UID: \"901195d1-b2c7-4ddb-831c-7f5efca5c758\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5x2t5" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.938280 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-trusted-ca-bundle\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.938309 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/17b393b4-5451-4ef1-9486-2b1b6a70a1c3-auth-proxy-config\") pod \"machine-config-operator-74547568cd-zswqb\" (UID: \"17b393b4-5451-4ef1-9486-2b1b6a70a1c3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zswqb" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.938379 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9-mountpoint-dir\") pod \"csi-hostpathplugin-wm6wm\" (UID: \"7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6wm" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.938976 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/17b393b4-5451-4ef1-9486-2b1b6a70a1c3-images\") pod \"machine-config-operator-74547568cd-zswqb\" (UID: \"17b393b4-5451-4ef1-9486-2b1b6a70a1c3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zswqb" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.938983 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-audit\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.939121 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcpz7\" (UniqueName: \"kubernetes.io/projected/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-kube-api-access-xcpz7\") pod \"controller-manager-879f6c89f-cc7mk\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.939172 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-audit-dir\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.939562 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-config\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.940150 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvtw9\" (UniqueName: \"kubernetes.io/projected/ce0eed91-28af-4966-b71a-3dda1df5d6cb-kube-api-access-rvtw9\") pod \"machine-config-server-f6zsw\" (UID: \"ce0eed91-28af-4966-b71a-3dda1df5d6cb\") " pod="openshift-machine-config-operator/machine-config-server-f6zsw" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.940266 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-audit-dir\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.940281 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/45c41860-47e0-4fc6-b9e2-73308ab35bfe-config-volume\") pod \"collect-profiles-29415930-s2vbd\" (UID: \"45c41860-47e0-4fc6-b9e2-73308ab35bfe\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-s2vbd" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.940329 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/af42c194-0d59-4fbb-ab5c-777172b46fd8-metrics-tls\") pod \"dns-operator-744455d44c-vg677\" (UID: \"af42c194-0d59-4fbb-ab5c-777172b46fd8\") " pod="openshift-dns-operator/dns-operator-744455d44c-vg677" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.940751 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-encryption-config\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.941703 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9f02aa74-e797-4fe5-8412-0a64e64730c1-trusted-ca\") pod \"console-operator-58897d9998-hlv6r\" (UID: \"9f02aa74-e797-4fe5-8412-0a64e64730c1\") " pod="openshift-console-operator/console-operator-58897d9998-hlv6r" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.941757 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/e0c0d8d7-7517-40e5-a9d4-d1c5160d64b7-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-6fzkv\" (UID: \"e0c0d8d7-7517-40e5-a9d4-d1c5160d64b7\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6fzkv" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.941810 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47ebd001-fdad-4dcf-a6ed-6e32320cca9b-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rt756\" (UID: \"47ebd001-fdad-4dcf-a6ed-6e32320cca9b\") " 
pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rt756" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.941834 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c4ea533-79da-4be4-a70c-6de0d3b6c120-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-s4xcd\" (UID: \"1c4ea533-79da-4be4-a70c-6de0d3b6c120\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s4xcd" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.941855 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3733b15a-cd45-418e-9452-79a33535ce35-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-c4krp\" (UID: \"3733b15a-cd45-418e-9452-79a33535ce35\") " pod="openshift-marketplace/marketplace-operator-79b997595-c4krp" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.941880 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/114cc896-bf47-4c02-b478-3bb3b3aaa729-trusted-ca\") pod \"ingress-operator-5b745b69d9-hpvmn\" (UID: \"114cc896-bf47-4c02-b478-3bb3b3aaa729\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hpvmn" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.941905 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d9ae9f2f-9154-404d-9dfa-40cc81cb1e8d-webhook-cert\") pod \"packageserver-d55dfcdfc-jj8px\" (UID: \"d9ae9f2f-9154-404d-9dfa-40cc81cb1e8d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jj8px" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.942285 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9f02aa74-e797-4fe5-8412-0a64e64730c1-serving-cert\") pod \"console-operator-58897d9998-hlv6r\" (UID: \"9f02aa74-e797-4fe5-8412-0a64e64730c1\") " pod="openshift-console-operator/console-operator-58897d9998-hlv6r" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.942502 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/47ebd001-fdad-4dcf-a6ed-6e32320cca9b-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rt756\" (UID: \"47ebd001-fdad-4dcf-a6ed-6e32320cca9b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rt756" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.942707 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6c7ede11-6cf9-43d8-8165-7d5ce10f9adc-serving-cert\") pod \"service-ca-operator-777779d784-74j6d\" (UID: \"6c7ede11-6cf9-43d8-8165-7d5ce10f9adc\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-74j6d" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.942938 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c203f16b-553f-4282-92bb-6b7af8dfc31d-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-q89t5\" (UID: \"c203f16b-553f-4282-92bb-6b7af8dfc31d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q89t5" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.943377 4961 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/faedd27a-ff89-4838-a73a-51e1c194f2e9-srv-cert\") pod \"catalog-operator-68c6474976-4z6bw\" (UID: \"faedd27a-ff89-4838-a73a-51e1c194f2e9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4z6bw" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.943448 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/02fabe40-bb13-4cec-af0f-0c08d46f511d-srv-cert\") pod \"olm-operator-6b444d44fb-znxrq\" (UID: \"02fabe40-bb13-4cec-af0f-0c08d46f511d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-znxrq" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.943565 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-etcd-serving-ca\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.943645 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/901195d1-b2c7-4ddb-831c-7f5efca5c758-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-5x2t5\" (UID: \"901195d1-b2c7-4ddb-831c-7f5efca5c758\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5x2t5" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.943767 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-node-pullsecrets\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.943828 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c4ea533-79da-4be4-a70c-6de0d3b6c120-config\") pod \"openshift-apiserver-operator-796bbdcf4f-s4xcd\" (UID: \"1c4ea533-79da-4be4-a70c-6de0d3b6c120\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s4xcd" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.943854 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5dcdb8e2-ea42-41ca-815d-058d299f96bd-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-p6lkk\" (UID: \"5dcdb8e2-ea42-41ca-815d-058d299f96bd\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p6lkk" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.943903 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-node-pullsecrets\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.943926 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/df860690-ba32-4a90-ab42-80ae129b935a-config-volume\") pod 
\"dns-default-mzkwv\" (UID: \"df860690-ba32-4a90-ab42-80ae129b935a\") " pod="openshift-dns/dns-default-mzkwv" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.944039 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ce0eed91-28af-4966-b71a-3dda1df5d6cb-node-bootstrap-token\") pod \"machine-config-server-f6zsw\" (UID: \"ce0eed91-28af-4966-b71a-3dda1df5d6cb\") " pod="openshift-machine-config-operator/machine-config-server-f6zsw" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.944121 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bfd6bf8b-a642-4717-a3e5-5e972e49e665-auth-proxy-config\") pod \"machine-approver-56656f9798-7xwcd\" (UID: \"bfd6bf8b-a642-4717-a3e5-5e972e49e665\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7xwcd" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.944186 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltk9l\" (UniqueName: \"kubernetes.io/projected/4a0cedd3-c46f-462a-8cd9-54629e7cb576-kube-api-access-ltk9l\") pod \"cluster-samples-operator-665b6dd947-vpcxg\" (UID: \"4a0cedd3-c46f-462a-8cd9-54629e7cb576\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vpcxg" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.944212 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-image-import-ca\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.944263 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pk74j\" (UniqueName: \"kubernetes.io/projected/694a4dea-466c-48de-b828-dded0b4e3309-kube-api-access-pk74j\") pod \"downloads-7954f5f757-2msfx\" (UID: \"694a4dea-466c-48de-b828-dded0b4e3309\") " pod="openshift-console/downloads-7954f5f757-2msfx" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.944292 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75749479-6797-4377-8ce6-5c329f7e7a78-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-jqd6w\" (UID: \"75749479-6797-4377-8ce6-5c329f7e7a78\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jqd6w" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.944682 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/df860690-ba32-4a90-ab42-80ae129b935a-config-volume\") pod \"dns-default-mzkwv\" (UID: \"df860690-ba32-4a90-ab42-80ae129b935a\") " pod="openshift-dns/dns-default-mzkwv" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.944755 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9-socket-dir\") pod \"csi-hostpathplugin-wm6wm\" (UID: \"7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6wm" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.944801 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config\" (UniqueName: \"kubernetes.io/configmap/c203f16b-553f-4282-92bb-6b7af8dfc31d-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-q89t5\" (UID: \"c203f16b-553f-4282-92bb-6b7af8dfc31d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q89t5" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.944828 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9-registration-dir\") pod \"csi-hostpathplugin-wm6wm\" (UID: \"7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6wm" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.944898 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75749479-6797-4377-8ce6-5c329f7e7a78-config\") pod \"kube-apiserver-operator-766d6c64bb-jqd6w\" (UID: \"75749479-6797-4377-8ce6-5c329f7e7a78\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jqd6w" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.944936 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/4a0cedd3-c46f-462a-8cd9-54629e7cb576-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-vpcxg\" (UID: \"4a0cedd3-c46f-462a-8cd9-54629e7cb576\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vpcxg" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.944974 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/d9ae9f2f-9154-404d-9dfa-40cc81cb1e8d-tmpfs\") pod \"packageserver-d55dfcdfc-jj8px\" (UID: \"d9ae9f2f-9154-404d-9dfa-40cc81cb1e8d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jj8px" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.945919 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/114cc896-bf47-4c02-b478-3bb3b3aaa729-trusted-ca\") pod \"ingress-operator-5b745b69d9-hpvmn\" (UID: \"114cc896-bf47-4c02-b478-3bb3b3aaa729\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hpvmn" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.945943 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4vkq\" (UniqueName: \"kubernetes.io/projected/2aa4ad5f-8dcb-4761-a31b-66a26fbc3be3-kube-api-access-q4vkq\") pod \"machine-config-controller-84d6567774-5ks25\" (UID: \"2aa4ad5f-8dcb-4761-a31b-66a26fbc3be3\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ks25" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.946151 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9-plugins-dir\") pod \"csi-hostpathplugin-wm6wm\" (UID: \"7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6wm" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.946241 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/bfd6bf8b-a642-4717-a3e5-5e972e49e665-auth-proxy-config\") pod \"machine-approver-56656f9798-7xwcd\" (UID: 
\"bfd6bf8b-a642-4717-a3e5-5e972e49e665\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7xwcd" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.946272 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2aa4ad5f-8dcb-4761-a31b-66a26fbc3be3-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5ks25\" (UID: \"2aa4ad5f-8dcb-4761-a31b-66a26fbc3be3\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ks25" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.945685 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9-socket-dir\") pod \"csi-hostpathplugin-wm6wm\" (UID: \"7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6wm" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.946439 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/bfd6bf8b-a642-4717-a3e5-5e972e49e665-machine-approver-tls\") pod \"machine-approver-56656f9798-7xwcd\" (UID: \"bfd6bf8b-a642-4717-a3e5-5e972e49e665\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7xwcd" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.946518 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-image-import-ca\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.946663 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1c4ea533-79da-4be4-a70c-6de0d3b6c120-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-s4xcd\" (UID: \"1c4ea533-79da-4be4-a70c-6de0d3b6c120\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s4xcd" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.946832 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9-plugins-dir\") pod \"csi-hostpathplugin-wm6wm\" (UID: \"7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6wm" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.946906 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zxdz\" (UniqueName: \"kubernetes.io/projected/1c4ea533-79da-4be4-a70c-6de0d3b6c120-kube-api-access-8zxdz\") pod \"openshift-apiserver-operator-796bbdcf4f-s4xcd\" (UID: \"1c4ea533-79da-4be4-a70c-6de0d3b6c120\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s4xcd" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.946943 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6md2g\" (UniqueName: \"kubernetes.io/projected/17b393b4-5451-4ef1-9486-2b1b6a70a1c3-kube-api-access-6md2g\") pod \"machine-config-operator-74547568cd-zswqb\" (UID: \"17b393b4-5451-4ef1-9486-2b1b6a70a1c3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zswqb" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 
17:35:36.947021 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/f9f9b3cf-6636-42ce-a11f-d85b976c33a5-signing-key\") pod \"service-ca-9c57cc56f-bghjx\" (UID: \"f9f9b3cf-6636-42ce-a11f-d85b976c33a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-bghjx" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.947278 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-serving-cert\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.947314 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/37e0f3d5-9ee5-4aac-aaef-205d8466b84a-cert\") pod \"ingress-canary-hg4b6\" (UID: \"37e0f3d5-9ee5-4aac-aaef-205d8466b84a\") " pod="openshift-ingress-canary/ingress-canary-hg4b6" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.947321 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ce0eed91-28af-4966-b71a-3dda1df5d6cb-certs\") pod \"machine-config-server-f6zsw\" (UID: \"ce0eed91-28af-4966-b71a-3dda1df5d6cb\") " pod="openshift-machine-config-operator/machine-config-server-f6zsw" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.947383 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9-registration-dir\") pod \"csi-hostpathplugin-wm6wm\" (UID: \"7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6wm" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.947407 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1c4ea533-79da-4be4-a70c-6de0d3b6c120-config\") pod \"openshift-apiserver-operator-796bbdcf4f-s4xcd\" (UID: \"1c4ea533-79da-4be4-a70c-6de0d3b6c120\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s4xcd" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.947500 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jbhk6\" (UniqueName: \"kubernetes.io/projected/3733b15a-cd45-418e-9452-79a33535ce35-kube-api-access-jbhk6\") pod \"marketplace-operator-79b997595-c4krp\" (UID: \"3733b15a-cd45-418e-9452-79a33535ce35\") " pod="openshift-marketplace/marketplace-operator-79b997595-c4krp" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.947714 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5dcdb8e2-ea42-41ca-815d-058d299f96bd-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-p6lkk\" (UID: \"5dcdb8e2-ea42-41ca-815d-058d299f96bd\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p6lkk" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.947761 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/901195d1-b2c7-4ddb-831c-7f5efca5c758-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-5x2t5\" (UID: 
\"901195d1-b2c7-4ddb-831c-7f5efca5c758\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5x2t5" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.947808 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3733b15a-cd45-418e-9452-79a33535ce35-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-c4krp\" (UID: \"3733b15a-cd45-418e-9452-79a33535ce35\") " pod="openshift-marketplace/marketplace-operator-79b997595-c4krp" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.947841 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nk2pl\" (UniqueName: \"kubernetes.io/projected/f9f9b3cf-6636-42ce-a11f-d85b976c33a5-kube-api-access-nk2pl\") pod \"service-ca-9c57cc56f-bghjx\" (UID: \"f9f9b3cf-6636-42ce-a11f-d85b976c33a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-bghjx" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.947898 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qw6tb\" (UniqueName: \"kubernetes.io/projected/37e0f3d5-9ee5-4aac-aaef-205d8466b84a-kube-api-access-qw6tb\") pod \"ingress-canary-hg4b6\" (UID: \"37e0f3d5-9ee5-4aac-aaef-205d8466b84a\") " pod="openshift-ingress-canary/ingress-canary-hg4b6" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.947901 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/d9ae9f2f-9154-404d-9dfa-40cc81cb1e8d-tmpfs\") pod \"packageserver-d55dfcdfc-jj8px\" (UID: \"d9ae9f2f-9154-404d-9dfa-40cc81cb1e8d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jj8px" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.947921 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/df860690-ba32-4a90-ab42-80ae129b935a-metrics-tls\") pod \"dns-default-mzkwv\" (UID: \"df860690-ba32-4a90-ab42-80ae129b935a\") " pod="openshift-dns/dns-default-mzkwv" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.947959 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/901195d1-b2c7-4ddb-831c-7f5efca5c758-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-5x2t5\" (UID: \"901195d1-b2c7-4ddb-831c-7f5efca5c758\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5x2t5" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.948455 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/d9ae9f2f-9154-404d-9dfa-40cc81cb1e8d-webhook-cert\") pod \"packageserver-d55dfcdfc-jj8px\" (UID: \"d9ae9f2f-9154-404d-9dfa-40cc81cb1e8d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jj8px" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.948603 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3733b15a-cd45-418e-9452-79a33535ce35-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-c4krp\" (UID: \"3733b15a-cd45-418e-9452-79a33535ce35\") " pod="openshift-marketplace/marketplace-operator-79b997595-c4krp" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.948671 4961 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/75749479-6797-4377-8ce6-5c329f7e7a78-config\") pod \"kube-apiserver-operator-766d6c64bb-jqd6w\" (UID: \"75749479-6797-4377-8ce6-5c329f7e7a78\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jqd6w" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.948725 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5dcdb8e2-ea42-41ca-815d-058d299f96bd-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-p6lkk\" (UID: \"5dcdb8e2-ea42-41ca-815d-058d299f96bd\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p6lkk" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.948922 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcpz7\" (UniqueName: \"kubernetes.io/projected/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-kube-api-access-xcpz7\") pod \"controller-manager-879f6c89f-cc7mk\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.949028 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-etcd-client\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.949060 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/75749479-6797-4377-8ce6-5c329f7e7a78-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-jqd6w\" (UID: \"75749479-6797-4377-8ce6-5c329f7e7a78\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jqd6w" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.949664 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c7ede11-6cf9-43d8-8165-7d5ce10f9adc-config\") pod \"service-ca-operator-777779d784-74j6d\" (UID: \"6c7ede11-6cf9-43d8-8165-7d5ce10f9adc\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-74j6d" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.949862 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-26kqr\" (UniqueName: \"kubernetes.io/projected/c203f16b-553f-4282-92bb-6b7af8dfc31d-kube-api-access-26kqr\") pod \"openshift-controller-manager-operator-756b6f6bc6-q89t5\" (UID: \"c203f16b-553f-4282-92bb-6b7af8dfc31d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q89t5" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.949910 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/114cc896-bf47-4c02-b478-3bb3b3aaa729-metrics-tls\") pod \"ingress-operator-5b745b69d9-hpvmn\" (UID: \"114cc896-bf47-4c02-b478-3bb3b3aaa729\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hpvmn" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.949910 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/4a0cedd3-c46f-462a-8cd9-54629e7cb576-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-vpcxg\" (UID: \"4a0cedd3-c46f-462a-8cd9-54629e7cb576\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vpcxg" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.950162 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/47ebd001-fdad-4dcf-a6ed-6e32320cca9b-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rt756\" (UID: \"47ebd001-fdad-4dcf-a6ed-6e32320cca9b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rt756" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.950239 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfd6bf8b-a642-4717-a3e5-5e972e49e665-config\") pod \"machine-approver-56656f9798-7xwcd\" (UID: \"bfd6bf8b-a642-4717-a3e5-5e972e49e665\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7xwcd" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.950282 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/17b393b4-5451-4ef1-9486-2b1b6a70a1c3-proxy-tls\") pod \"machine-config-operator-74547568cd-zswqb\" (UID: \"17b393b4-5451-4ef1-9486-2b1b6a70a1c3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zswqb" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.950313 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/84d294ab-bfe4-4ec2-ba8a-6a5540d570b8-service-ca-bundle\") pod \"router-default-5444994796-nksfl\" (UID: \"84d294ab-bfe4-4ec2-ba8a-6a5540d570b8\") " pod="openshift-ingress/router-default-5444994796-nksfl" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.950419 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f02aa74-e797-4fe5-8412-0a64e64730c1-config\") pod \"console-operator-58897d9998-hlv6r\" (UID: \"9f02aa74-e797-4fe5-8412-0a64e64730c1\") " pod="openshift-console-operator/console-operator-58897d9998-hlv6r" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.950951 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/bfd6bf8b-a642-4717-a3e5-5e972e49e665-config\") pod \"machine-approver-56656f9798-7xwcd\" (UID: \"bfd6bf8b-a642-4717-a3e5-5e972e49e665\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7xwcd" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.951047 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/faedd27a-ff89-4838-a73a-51e1c194f2e9-profile-collector-cert\") pod \"catalog-operator-68c6474976-4z6bw\" (UID: \"faedd27a-ff89-4838-a73a-51e1c194f2e9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4z6bw" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.951108 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6c7ede11-6cf9-43d8-8165-7d5ce10f9adc-config\") pod \"service-ca-operator-777779d784-74j6d\" (UID: \"6c7ede11-6cf9-43d8-8165-7d5ce10f9adc\") " 
pod="openshift-service-ca-operator/service-ca-operator-777779d784-74j6d" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.951056 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/faedd27a-ff89-4838-a73a-51e1c194f2e9-srv-cert\") pod \"catalog-operator-68c6474976-4z6bw\" (UID: \"faedd27a-ff89-4838-a73a-51e1c194f2e9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4z6bw" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.952122 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.952547 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9f02aa74-e797-4fe5-8412-0a64e64730c1-config\") pod \"console-operator-58897d9998-hlv6r\" (UID: \"9f02aa74-e797-4fe5-8412-0a64e64730c1\") " pod="openshift-console-operator/console-operator-58897d9998-hlv6r" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.955908 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-serving-cert\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.955978 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-etcd-client\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.956087 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wxxhd\" (UniqueName: \"kubernetes.io/projected/e44fec59-fa03-4dd6-be86-108902060c91-kube-api-access-wxxhd\") pod \"authentication-operator-69f744f599-cxwzt\" (UID: \"e44fec59-fa03-4dd6-be86-108902060c91\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.956443 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/df860690-ba32-4a90-ab42-80ae129b935a-metrics-tls\") pod \"dns-default-mzkwv\" (UID: \"df860690-ba32-4a90-ab42-80ae129b935a\") " pod="openshift-dns/dns-default-mzkwv" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.956509 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/47ebd001-fdad-4dcf-a6ed-6e32320cca9b-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rt756\" (UID: \"47ebd001-fdad-4dcf-a6ed-6e32320cca9b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rt756" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.956742 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/17b393b4-5451-4ef1-9486-2b1b6a70a1c3-proxy-tls\") pod \"machine-config-operator-74547568cd-zswqb\" (UID: \"17b393b4-5451-4ef1-9486-2b1b6a70a1c3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zswqb" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.957540 4961 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3733b15a-cd45-418e-9452-79a33535ce35-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-c4krp\" (UID: \"3733b15a-cd45-418e-9452-79a33535ce35\") " pod="openshift-marketplace/marketplace-operator-79b997595-c4krp" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.958049 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/114cc896-bf47-4c02-b478-3bb3b3aaa729-metrics-tls\") pod \"ingress-operator-5b745b69d9-hpvmn\" (UID: \"114cc896-bf47-4c02-b478-3bb3b3aaa729\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hpvmn" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.958714 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/d9ae9f2f-9154-404d-9dfa-40cc81cb1e8d-apiservice-cert\") pod \"packageserver-d55dfcdfc-jj8px\" (UID: \"d9ae9f2f-9154-404d-9dfa-40cc81cb1e8d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jj8px" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.959748 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/45c41860-47e0-4fc6-b9e2-73308ab35bfe-secret-volume\") pod \"collect-profiles-29415930-s2vbd\" (UID: \"45c41860-47e0-4fc6-b9e2-73308ab35bfe\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-s2vbd" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.961638 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/75749479-6797-4377-8ce6-5c329f7e7a78-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-jqd6w\" (UID: \"75749479-6797-4377-8ce6-5c329f7e7a78\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jqd6w" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.973665 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 17:35:36 crc kubenswrapper[4961]: I1205 17:35:36.992209 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.998208 4961 projected.go:194] Error preparing data for projected volume kube-api-access-4p96n for pod openshift-authentication/oauth-openshift-558db77b4-f9gdg: failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:36 crc kubenswrapper[4961]: E1205 17:35:36.998578 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-kube-api-access-4p96n podName:dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6 nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.498555016 +0000 UTC m=+143.559705489 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-4p96n" (UniqueName: "kubernetes.io/projected/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-kube-api-access-4p96n") pod "oauth-openshift-558db77b4-f9gdg" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6") : failed to sync configmap cache: timed out waiting for the condition Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.012366 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.031294 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.036962 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h2dzz\" (UniqueName: \"kubernetes.io/projected/475e25de-63ce-4cae-8fc6-4c057d616247-kube-api-access-h2dzz\") pod \"machine-api-operator-5694c8668f-6px8t\" (UID: \"475e25de-63ce-4cae-8fc6-4c057d616247\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6px8t" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.052702 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.053053 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:37 crc kubenswrapper[4961]: E1205 17:35:37.053296 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.553261884 +0000 UTC m=+143.614412357 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.053521 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxxzs\" (UniqueName: \"kubernetes.io/projected/3d4d981b-d365-4cc1-ac62-0a4237be8bdf-kube-api-access-nxxzs\") pod \"migrator-59844c95c7-mwbff\" (UID: \"3d4d981b-d365-4cc1-ac62-0a4237be8bdf\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mwbff" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.053688 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/02fabe40-bb13-4cec-af0f-0c08d46f511d-profile-collector-cert\") pod \"olm-operator-6b444d44fb-znxrq\" (UID: \"02fabe40-bb13-4cec-af0f-0c08d46f511d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-znxrq" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.053852 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwwg8\" (UniqueName: \"kubernetes.io/projected/601ba962-a964-472d-b481-4946fd7265b1-kube-api-access-vwwg8\") pod \"control-plane-machine-set-operator-78cbb6b69f-5hmrk\" (UID: \"601ba962-a964-472d-b481-4946fd7265b1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5hmrk" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.054017 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z9htm\" (UniqueName: \"kubernetes.io/projected/2af64acf-53d5-4288-b53f-e889631a7ee4-kube-api-access-z9htm\") pod \"multus-admission-controller-857f4d67dd-xlpsm\" (UID: \"2af64acf-53d5-4288-b53f-e889631a7ee4\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xlpsm" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.054148 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2aa4ad5f-8dcb-4761-a31b-66a26fbc3be3-proxy-tls\") pod \"machine-config-controller-84d6567774-5ks25\" (UID: \"2aa4ad5f-8dcb-4761-a31b-66a26fbc3be3\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ks25" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.054269 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvtw9\" (UniqueName: \"kubernetes.io/projected/ce0eed91-28af-4966-b71a-3dda1df5d6cb-kube-api-access-rvtw9\") pod \"machine-config-server-f6zsw\" (UID: \"ce0eed91-28af-4966-b71a-3dda1df5d6cb\") " pod="openshift-machine-config-operator/machine-config-server-f6zsw" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.054374 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/e0c0d8d7-7517-40e5-a9d4-d1c5160d64b7-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-6fzkv\" (UID: \"e0c0d8d7-7517-40e5-a9d4-d1c5160d64b7\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6fzkv" Dec 05 
17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.054459 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/02fabe40-bb13-4cec-af0f-0c08d46f511d-srv-cert\") pod \"olm-operator-6b444d44fb-znxrq\" (UID: \"02fabe40-bb13-4cec-af0f-0c08d46f511d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-znxrq" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.054529 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/ce0eed91-28af-4966-b71a-3dda1df5d6cb-node-bootstrap-token\") pod \"machine-config-server-f6zsw\" (UID: \"ce0eed91-28af-4966-b71a-3dda1df5d6cb\") " pod="openshift-machine-config-operator/machine-config-server-f6zsw" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.054643 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4vkq\" (UniqueName: \"kubernetes.io/projected/2aa4ad5f-8dcb-4761-a31b-66a26fbc3be3-kube-api-access-q4vkq\") pod \"machine-config-controller-84d6567774-5ks25\" (UID: \"2aa4ad5f-8dcb-4761-a31b-66a26fbc3be3\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ks25" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.054735 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2aa4ad5f-8dcb-4761-a31b-66a26fbc3be3-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5ks25\" (UID: \"2aa4ad5f-8dcb-4761-a31b-66a26fbc3be3\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ks25" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.054856 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/f9f9b3cf-6636-42ce-a11f-d85b976c33a5-signing-key\") pod \"service-ca-9c57cc56f-bghjx\" (UID: \"f9f9b3cf-6636-42ce-a11f-d85b976c33a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-bghjx" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.054952 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ce0eed91-28af-4966-b71a-3dda1df5d6cb-certs\") pod \"machine-config-server-f6zsw\" (UID: \"ce0eed91-28af-4966-b71a-3dda1df5d6cb\") " pod="openshift-machine-config-operator/machine-config-server-f6zsw" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.055074 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nk2pl\" (UniqueName: \"kubernetes.io/projected/f9f9b3cf-6636-42ce-a11f-d85b976c33a5-kube-api-access-nk2pl\") pod \"service-ca-9c57cc56f-bghjx\" (UID: \"f9f9b3cf-6636-42ce-a11f-d85b976c33a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-bghjx" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.055214 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bzrhq\" (UniqueName: \"kubernetes.io/projected/e0c0d8d7-7517-40e5-a9d4-d1c5160d64b7-kube-api-access-bzrhq\") pod \"package-server-manager-789f6589d5-6fzkv\" (UID: \"e0c0d8d7-7517-40e5-a9d4-d1c5160d64b7\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6fzkv" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.055400 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" 
(UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.055511 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/601ba962-a964-472d-b481-4946fd7265b1-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-5hmrk\" (UID: \"601ba962-a964-472d-b481-4946fd7265b1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5hmrk" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.055629 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/f9f9b3cf-6636-42ce-a11f-d85b976c33a5-signing-cabundle\") pod \"service-ca-9c57cc56f-bghjx\" (UID: \"f9f9b3cf-6636-42ce-a11f-d85b976c33a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-bghjx" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.055766 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v27k8\" (UniqueName: \"kubernetes.io/projected/02fabe40-bb13-4cec-af0f-0c08d46f511d-kube-api-access-v27k8\") pod \"olm-operator-6b444d44fb-znxrq\" (UID: \"02fabe40-bb13-4cec-af0f-0c08d46f511d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-znxrq" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.055873 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/2aa4ad5f-8dcb-4761-a31b-66a26fbc3be3-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-5ks25\" (UID: \"2aa4ad5f-8dcb-4761-a31b-66a26fbc3be3\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ks25" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.056024 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2af64acf-53d5-4288-b53f-e889631a7ee4-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-xlpsm\" (UID: \"2af64acf-53d5-4288-b53f-e889631a7ee4\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xlpsm" Dec 05 17:35:37 crc kubenswrapper[4961]: E1205 17:35:37.056118 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.556097132 +0000 UTC m=+143.617247675 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.056884 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/f9f9b3cf-6636-42ce-a11f-d85b976c33a5-signing-cabundle\") pod \"service-ca-9c57cc56f-bghjx\" (UID: \"f9f9b3cf-6636-42ce-a11f-d85b976c33a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-bghjx" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.057111 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/02fabe40-bb13-4cec-af0f-0c08d46f511d-profile-collector-cert\") pod \"olm-operator-6b444d44fb-znxrq\" (UID: \"02fabe40-bb13-4cec-af0f-0c08d46f511d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-znxrq" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.058491 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/2aa4ad5f-8dcb-4761-a31b-66a26fbc3be3-proxy-tls\") pod \"machine-config-controller-84d6567774-5ks25\" (UID: \"2aa4ad5f-8dcb-4761-a31b-66a26fbc3be3\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ks25" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.059387 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/ce0eed91-28af-4966-b71a-3dda1df5d6cb-certs\") pod \"machine-config-server-f6zsw\" (UID: \"ce0eed91-28af-4966-b71a-3dda1df5d6cb\") " pod="openshift-machine-config-operator/machine-config-server-f6zsw" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.059836 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/02fabe40-bb13-4cec-af0f-0c08d46f511d-srv-cert\") pod \"olm-operator-6b444d44fb-znxrq\" (UID: \"02fabe40-bb13-4cec-af0f-0c08d46f511d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-znxrq" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.062454 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/601ba962-a964-472d-b481-4946fd7265b1-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-5hmrk\" (UID: \"601ba962-a964-472d-b481-4946fd7265b1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5hmrk" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.062671 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/f9f9b3cf-6636-42ce-a11f-d85b976c33a5-signing-key\") pod \"service-ca-9c57cc56f-bghjx\" (UID: \"f9f9b3cf-6636-42ce-a11f-d85b976c33a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-bghjx" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.062995 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: 
\"kubernetes.io/secret/ce0eed91-28af-4966-b71a-3dda1df5d6cb-node-bootstrap-token\") pod \"machine-config-server-f6zsw\" (UID: \"ce0eed91-28af-4966-b71a-3dda1df5d6cb\") " pod="openshift-machine-config-operator/machine-config-server-f6zsw" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.064049 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/2af64acf-53d5-4288-b53f-e889631a7ee4-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-xlpsm\" (UID: \"2af64acf-53d5-4288-b53f-e889631a7ee4\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xlpsm" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.089864 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c203f16b-553f-4282-92bb-6b7af8dfc31d-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-q89t5\" (UID: \"c203f16b-553f-4282-92bb-6b7af8dfc31d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q89t5" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.089899 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/e0c0d8d7-7517-40e5-a9d4-d1c5160d64b7-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-6fzkv\" (UID: \"e0c0d8d7-7517-40e5-a9d4-d1c5160d64b7\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6fzkv" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.092643 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/84d294ab-bfe4-4ec2-ba8a-6a5540d570b8-service-ca-bundle\") pod \"router-default-5444994796-nksfl\" (UID: \"84d294ab-bfe4-4ec2-ba8a-6a5540d570b8\") " pod="openshift-ingress/router-default-5444994796-nksfl" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.095124 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/84d294ab-bfe4-4ec2-ba8a-6a5540d570b8-default-certificate\") pod \"router-default-5444994796-nksfl\" (UID: \"84d294ab-bfe4-4ec2-ba8a-6a5540d570b8\") " pod="openshift-ingress/router-default-5444994796-nksfl" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.096698 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/84d294ab-bfe4-4ec2-ba8a-6a5540d570b8-stats-auth\") pod \"router-default-5444994796-nksfl\" (UID: \"84d294ab-bfe4-4ec2-ba8a-6a5540d570b8\") " pod="openshift-ingress/router-default-5444994796-nksfl" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.097098 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/84d294ab-bfe4-4ec2-ba8a-6a5540d570b8-metrics-certs\") pod \"router-default-5444994796-nksfl\" (UID: \"84d294ab-bfe4-4ec2-ba8a-6a5540d570b8\") " pod="openshift-ingress/router-default-5444994796-nksfl" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.108931 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-bound-sa-token\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 
17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.127794 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zqx4\" (UniqueName: \"kubernetes.io/projected/e42cbe9a-2cfd-4cf7-b43f-6ee4458995a5-kube-api-access-8zqx4\") pod \"openshift-config-operator-7777fb866f-qtkvq\" (UID: \"e42cbe9a-2cfd-4cf7-b43f-6ee4458995a5\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-qtkvq" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.147746 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tbv5x\" (UniqueName: \"kubernetes.io/projected/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-kube-api-access-tbv5x\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.158267 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:37 crc kubenswrapper[4961]: E1205 17:35:37.158482 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.658454111 +0000 UTC m=+143.719604584 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.158882 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:37 crc kubenswrapper[4961]: E1205 17:35:37.159694 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.65968182 +0000 UTC m=+143.720832293 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.165074 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dd7g\" (UniqueName: \"kubernetes.io/projected/7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9-kube-api-access-8dd7g\") pod \"csi-hostpathplugin-wm6wm\" (UID: \"7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9\") " pod="hostpath-provisioner/csi-hostpathplugin-wm6wm" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.188055 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gsjnz\" (UniqueName: \"kubernetes.io/projected/d9ae9f2f-9154-404d-9dfa-40cc81cb1e8d-kube-api-access-gsjnz\") pod \"packageserver-d55dfcdfc-jj8px\" (UID: \"d9ae9f2f-9154-404d-9dfa-40cc81cb1e8d\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jj8px" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.201190 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jj8px" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.218257 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ws8s4\" (UniqueName: \"kubernetes.io/projected/faedd27a-ff89-4838-a73a-51e1c194f2e9-kube-api-access-ws8s4\") pod \"catalog-operator-68c6474976-4z6bw\" (UID: \"faedd27a-ff89-4838-a73a-51e1c194f2e9\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4z6bw" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.229882 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zvc9m\" (UniqueName: \"kubernetes.io/projected/84d294ab-bfe4-4ec2-ba8a-6a5540d570b8-kube-api-access-zvc9m\") pod \"router-default-5444994796-nksfl\" (UID: \"84d294ab-bfe4-4ec2-ba8a-6a5540d570b8\") " pod="openshift-ingress/router-default-5444994796-nksfl" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.245342 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fhbwx\" (UniqueName: \"kubernetes.io/projected/9f02aa74-e797-4fe5-8412-0a64e64730c1-kube-api-access-fhbwx\") pod \"console-operator-58897d9998-hlv6r\" (UID: \"9f02aa74-e797-4fe5-8412-0a64e64730c1\") " pod="openshift-console-operator/console-operator-58897d9998-hlv6r" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.260201 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:37 crc kubenswrapper[4961]: E1205 17:35:37.260333 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.760315269 +0000 UTC m=+143.821465742 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.260694 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.260883 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-wm6wm" Dec 05 17:35:37 crc kubenswrapper[4961]: E1205 17:35:37.261369 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.761358714 +0000 UTC m=+143.822509187 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.270107 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-74k5k\" (UniqueName: \"kubernetes.io/projected/5dcdb8e2-ea42-41ca-815d-058d299f96bd-kube-api-access-74k5k\") pod \"kube-storage-version-migrator-operator-b67b599dd-p6lkk\" (UID: \"5dcdb8e2-ea42-41ca-815d-058d299f96bd\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p6lkk" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.295320 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmmnp\" (UniqueName: \"kubernetes.io/projected/1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2-kube-api-access-mmmnp\") pod \"apiserver-76f77b778f-8vtdg\" (UID: \"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2\") " pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.315520 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5xjf\" (UniqueName: \"kubernetes.io/projected/6c7ede11-6cf9-43d8-8165-7d5ce10f9adc-kube-api-access-j5xjf\") pod \"service-ca-operator-777779d784-74j6d\" (UID: \"6c7ede11-6cf9-43d8-8165-7d5ce10f9adc\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-74j6d" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.336895 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j229g\" (UniqueName: \"kubernetes.io/projected/114cc896-bf47-4c02-b478-3bb3b3aaa729-kube-api-access-j229g\") pod \"ingress-operator-5b745b69d9-hpvmn\" (UID: 
\"114cc896-bf47-4c02-b478-3bb3b3aaa729\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hpvmn" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.350557 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/47ebd001-fdad-4dcf-a6ed-6e32320cca9b-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-rt756\" (UID: \"47ebd001-fdad-4dcf-a6ed-6e32320cca9b\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rt756" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.364424 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:37 crc kubenswrapper[4961]: E1205 17:35:37.364736 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.864705286 +0000 UTC m=+143.925855759 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.365230 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:37 crc kubenswrapper[4961]: E1205 17:35:37.365738 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.865715151 +0000 UTC m=+143.926865694 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.368280 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-smx5z\" (UniqueName: \"kubernetes.io/projected/bfd6bf8b-a642-4717-a3e5-5e972e49e665-kube-api-access-smx5z\") pod \"machine-approver-56656f9798-7xwcd\" (UID: \"bfd6bf8b-a642-4717-a3e5-5e972e49e665\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7xwcd" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.370864 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.375149 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qtkvq" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.387405 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rt756" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.391343 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-hlv6r" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.392538 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gs5nt\" (UniqueName: \"kubernetes.io/projected/df860690-ba32-4a90-ab42-80ae129b935a-kube-api-access-gs5nt\") pod \"dns-default-mzkwv\" (UID: \"df860690-ba32-4a90-ab42-80ae129b935a\") " pod="openshift-dns/dns-default-mzkwv" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.394051 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jj8px"] Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.399006 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-nksfl" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.407842 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p6lkk" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.411029 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7vr9\" (UniqueName: \"kubernetes.io/projected/45c41860-47e0-4fc6-b9e2-73308ab35bfe-kube-api-access-j7vr9\") pod \"collect-profiles-29415930-s2vbd\" (UID: \"45c41860-47e0-4fc6-b9e2-73308ab35bfe\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-s2vbd" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.427969 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g78rm\" (UniqueName: \"kubernetes.io/projected/af42c194-0d59-4fbb-ab5c-777172b46fd8-kube-api-access-g78rm\") pod \"dns-operator-744455d44c-vg677\" (UID: \"af42c194-0d59-4fbb-ab5c-777172b46fd8\") " pod="openshift-dns-operator/dns-operator-744455d44c-vg677" Dec 05 17:35:37 crc kubenswrapper[4961]: W1205 17:35:37.428447 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd9ae9f2f_9154_404d_9dfa_40cc81cb1e8d.slice/crio-7c5d5a55d708636cbfb41db33d059b44162e0d39a4b054c88f57c840f2b8493c WatchSource:0}: Error finding container 7c5d5a55d708636cbfb41db33d059b44162e0d39a4b054c88f57c840f2b8493c: Status 404 returned error can't find the container with id 7c5d5a55d708636cbfb41db33d059b44162e0d39a4b054c88f57c840f2b8493c Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.445247 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4z6bw" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.454380 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/114cc896-bf47-4c02-b478-3bb3b3aaa729-bound-sa-token\") pod \"ingress-operator-5b745b69d9-hpvmn\" (UID: \"114cc896-bf47-4c02-b478-3bb3b3aaa729\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hpvmn" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.468369 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.468680 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbxd4\" (UniqueName: \"kubernetes.io/projected/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-kube-api-access-vbxd4\") pod \"route-controller-manager-6576b87f9c-hvmkj\" (UID: \"f9ea79ab-de4c-4165-82d4-84b9d73df5a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.468716 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-client-ca\") pod \"route-controller-manager-6576b87f9c-hvmkj\" (UID: \"f9ea79ab-de4c-4165-82d4-84b9d73df5a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.468748 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.468802 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-serving-cert\") pod \"controller-manager-879f6c89f-cc7mk\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.468829 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.468853 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-config\") pod \"controller-manager-879f6c89f-cc7mk\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.468890 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e44fec59-fa03-4dd6-be86-108902060c91-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-cxwzt\" (UID: \"e44fec59-fa03-4dd6-be86-108902060c91\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.468932 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.468965 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.469071 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-audit-policies\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.469096 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d1f2c38b-9894-4145-bb3f-a38bfaf9e16e-trusted-ca\") pod 
\"cluster-image-registry-operator-dc59b4c8b-nx5xr\" (UID: \"d1f2c38b-9894-4145-bb3f-a38bfaf9e16e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nx5xr" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.469139 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.469185 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.469206 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e44fec59-fa03-4dd6-be86-108902060c91-serving-cert\") pod \"authentication-operator-69f744f599-cxwzt\" (UID: \"e44fec59-fa03-4dd6-be86-108902060c91\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.469250 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.469273 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-config\") pod \"route-controller-manager-6576b87f9c-hvmkj\" (UID: \"f9ea79ab-de4c-4165-82d4-84b9d73df5a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.469396 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-audit-policies\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.469438 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.469462 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/475e25de-63ce-4cae-8fc6-4c057d616247-config\") pod \"machine-api-operator-5694c8668f-6px8t\" (UID: \"475e25de-63ce-4cae-8fc6-4c057d616247\") " 
pod="openshift-machine-api/machine-api-operator-5694c8668f-6px8t" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.469504 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.469535 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.469565 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:37 crc kubenswrapper[4961]: E1205 17:35:37.469619 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:37.969577546 +0000 UTC m=+144.030728059 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.469686 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/892bdd9c-95f3-450b-9b7b-917e481f0d6f-etcd-client\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.469733 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.470641 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-client-ca\") pod \"route-controller-manager-6576b87f9c-hvmkj\" (UID: \"f9ea79ab-de4c-4165-82d4-84b9d73df5a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.470809 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-audit-policies\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.472390 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-config\") pod \"controller-manager-879f6c89f-cc7mk\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.472514 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.477247 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/892bdd9c-95f3-450b-9b7b-917e481f0d6f-etcd-client\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.477539 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbxd4\" (UniqueName: \"kubernetes.io/projected/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-kube-api-access-vbxd4\") pod 
\"route-controller-manager-6576b87f9c-hvmkj\" (UID: \"f9ea79ab-de4c-4165-82d4-84b9d73df5a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.478091 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.478724 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.479795 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.480469 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/475e25de-63ce-4cae-8fc6-4c057d616247-config\") pod \"machine-api-operator-5694c8668f-6px8t\" (UID: \"475e25de-63ce-4cae-8fc6-4c057d616247\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-6px8t" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.481154 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.481178 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.481482 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-serving-cert\") pod \"controller-manager-879f6c89f-cc7mk\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.482728 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/892bdd9c-95f3-450b-9b7b-917e481f0d6f-audit-policies\") pod \"apiserver-7bbb656c7d-ld8sp\" (UID: \"892bdd9c-95f3-450b-9b7b-917e481f0d6f\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:37 crc kubenswrapper[4961]: 
I1205 17:35:37.485151 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e44fec59-fa03-4dd6-be86-108902060c91-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-cxwzt\" (UID: \"e44fec59-fa03-4dd6-be86-108902060c91\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.485640 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-74j6d" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.487071 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e44fec59-fa03-4dd6-be86-108902060c91-serving-cert\") pod \"authentication-operator-69f744f599-cxwzt\" (UID: \"e44fec59-fa03-4dd6-be86-108902060c91\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.487253 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.487667 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.487986 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.488699 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/d1f2c38b-9894-4145-bb3f-a38bfaf9e16e-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-nx5xr\" (UID: \"d1f2c38b-9894-4145-bb3f-a38bfaf9e16e\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nx5xr" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.489194 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-wm6wm"] Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.490587 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.491021 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" 
(UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.494648 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-config\") pod \"route-controller-manager-6576b87f9c-hvmkj\" (UID: \"f9ea79ab-de4c-4165-82d4-84b9d73df5a4\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.494922 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.495938 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pk74j\" (UniqueName: \"kubernetes.io/projected/694a4dea-466c-48de-b828-dded0b4e3309-kube-api-access-pk74j\") pod \"downloads-7954f5f757-2msfx\" (UID: \"694a4dea-466c-48de-b828-dded0b4e3309\") " pod="openshift-console/downloads-7954f5f757-2msfx" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.504150 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltk9l\" (UniqueName: \"kubernetes.io/projected/4a0cedd3-c46f-462a-8cd9-54629e7cb576-kube-api-access-ltk9l\") pod \"cluster-samples-operator-665b6dd947-vpcxg\" (UID: \"4a0cedd3-c46f-462a-8cd9-54629e7cb576\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vpcxg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.518269 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6md2g\" (UniqueName: \"kubernetes.io/projected/17b393b4-5451-4ef1-9486-2b1b6a70a1c3-kube-api-access-6md2g\") pod \"machine-config-operator-74547568cd-zswqb\" (UID: \"17b393b4-5451-4ef1-9486-2b1b6a70a1c3\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zswqb" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.518787 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-s2vbd" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.527944 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7xwcd" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.532683 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zxdz\" (UniqueName: \"kubernetes.io/projected/1c4ea533-79da-4be4-a70c-6de0d3b6c120-kube-api-access-8zxdz\") pod \"openshift-apiserver-operator-796bbdcf4f-s4xcd\" (UID: \"1c4ea533-79da-4be4-a70c-6de0d3b6c120\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s4xcd" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.540332 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-mzkwv" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.560256 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-2msfx" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.563748 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jbhk6\" (UniqueName: \"kubernetes.io/projected/3733b15a-cd45-418e-9452-79a33535ce35-kube-api-access-jbhk6\") pod \"marketplace-operator-79b997595-c4krp\" (UID: \"3733b15a-cd45-418e-9452-79a33535ce35\") " pod="openshift-marketplace/marketplace-operator-79b997595-c4krp" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.570395 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qw6tb\" (UniqueName: \"kubernetes.io/projected/37e0f3d5-9ee5-4aac-aaef-205d8466b84a-kube-api-access-qw6tb\") pod \"ingress-canary-hg4b6\" (UID: \"37e0f3d5-9ee5-4aac-aaef-205d8466b84a\") " pod="openshift-ingress-canary/ingress-canary-hg4b6" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.570755 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4p96n\" (UniqueName: \"kubernetes.io/projected/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-kube-api-access-4p96n\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.570946 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:37 crc kubenswrapper[4961]: E1205 17:35:37.571347 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:38.071331671 +0000 UTC m=+144.132482144 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.580104 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hpvmn" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.583354 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4p96n\" (UniqueName: \"kubernetes.io/projected/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-kube-api-access-4p96n\") pod \"oauth-openshift-558db77b4-f9gdg\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:37 crc kubenswrapper[4961]: W1205 17:35:37.585303 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7ed9f05b_c348_41bd_bce4_cc4ccc7c0bd9.slice/crio-7e9798c31a8a4450f9731e7a6f1ed020717b3f2df2f43c5ed8a1e734aec86e7e WatchSource:0}: Error finding container 7e9798c31a8a4450f9731e7a6f1ed020717b3f2df2f43c5ed8a1e734aec86e7e: Status 404 returned error can't find the container with id 7e9798c31a8a4450f9731e7a6f1ed020717b3f2df2f43c5ed8a1e734aec86e7e Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.589450 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/75749479-6797-4377-8ce6-5c329f7e7a78-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-jqd6w\" (UID: \"75749479-6797-4377-8ce6-5c329f7e7a78\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jqd6w" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.615289 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/901195d1-b2c7-4ddb-831c-7f5efca5c758-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-5x2t5\" (UID: \"901195d1-b2c7-4ddb-831c-7f5efca5c758\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5x2t5" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.631756 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-26kqr\" (UniqueName: \"kubernetes.io/projected/c203f16b-553f-4282-92bb-6b7af8dfc31d-kube-api-access-26kqr\") pod \"openshift-controller-manager-operator-756b6f6bc6-q89t5\" (UID: \"c203f16b-553f-4282-92bb-6b7af8dfc31d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q89t5" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.638373 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-vg677" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.649009 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vpcxg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.652589 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxxzs\" (UniqueName: \"kubernetes.io/projected/3d4d981b-d365-4cc1-ac62-0a4237be8bdf-kube-api-access-nxxzs\") pod \"migrator-59844c95c7-mwbff\" (UID: \"3d4d981b-d365-4cc1-ac62-0a4237be8bdf\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mwbff" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.653828 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jqd6w" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.669214 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwwg8\" (UniqueName: \"kubernetes.io/projected/601ba962-a964-472d-b481-4946fd7265b1-kube-api-access-vwwg8\") pod \"control-plane-machine-set-operator-78cbb6b69f-5hmrk\" (UID: \"601ba962-a964-472d-b481-4946fd7265b1\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5hmrk" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.673146 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:37 crc kubenswrapper[4961]: E1205 17:35:37.674014 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:38.173984677 +0000 UTC m=+144.235135170 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.675763 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-6px8t" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.687915 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.693364 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z9htm\" (UniqueName: \"kubernetes.io/projected/2af64acf-53d5-4288-b53f-e889631a7ee4-kube-api-access-z9htm\") pod \"multus-admission-controller-857f4d67dd-xlpsm\" (UID: \"2af64acf-53d5-4288-b53f-e889631a7ee4\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-xlpsm" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.715073 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5x2t5" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.723970 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zswqb" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.725640 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4vkq\" (UniqueName: \"kubernetes.io/projected/2aa4ad5f-8dcb-4761-a31b-66a26fbc3be3-kube-api-access-q4vkq\") pod \"machine-config-controller-84d6567774-5ks25\" (UID: \"2aa4ad5f-8dcb-4761-a31b-66a26fbc3be3\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ks25" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.728013 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ks25" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.728832 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvtw9\" (UniqueName: \"kubernetes.io/projected/ce0eed91-28af-4966-b71a-3dda1df5d6cb-kube-api-access-rvtw9\") pod \"machine-config-server-f6zsw\" (UID: \"ce0eed91-28af-4966-b71a-3dda1df5d6cb\") " pod="openshift-machine-config-operator/machine-config-server-f6zsw" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.729714 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.734006 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.735562 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-xlpsm" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.752000 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nk2pl\" (UniqueName: \"kubernetes.io/projected/f9f9b3cf-6636-42ce-a11f-d85b976c33a5-kube-api-access-nk2pl\") pod \"service-ca-9c57cc56f-bghjx\" (UID: \"f9f9b3cf-6636-42ce-a11f-d85b976c33a5\") " pod="openshift-service-ca/service-ca-9c57cc56f-bghjx" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.754642 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nx5xr" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.755408 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-c4krp" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.763656 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mwbff" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.770526 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.776546 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5hmrk" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.776566 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:37 crc kubenswrapper[4961]: E1205 17:35:37.776890 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:38.27687583 +0000 UTC m=+144.338026293 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.780543 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.788451 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bzrhq\" (UniqueName: \"kubernetes.io/projected/e0c0d8d7-7517-40e5-a9d4-d1c5160d64b7-kube-api-access-bzrhq\") pod \"package-server-manager-789f6589d5-6fzkv\" (UID: \"e0c0d8d7-7517-40e5-a9d4-d1c5160d64b7\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6fzkv" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.791988 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6fzkv" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.798820 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v27k8\" (UniqueName: \"kubernetes.io/projected/02fabe40-bb13-4cec-af0f-0c08d46f511d-kube-api-access-v27k8\") pod \"olm-operator-6b444d44fb-znxrq\" (UID: \"02fabe40-bb13-4cec-af0f-0c08d46f511d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-znxrq" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.798865 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s4xcd" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.807898 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p6lkk"] Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.809427 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-bghjx" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.816309 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q89t5" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.826553 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-f6zsw" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.832302 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-hg4b6" Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.854641 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-hlv6r"] Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.878317 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:37 crc kubenswrapper[4961]: E1205 17:35:37.878932 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:38.378765929 +0000 UTC m=+144.439916402 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.894926 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-qtkvq"] Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.904422 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-nksfl" event={"ID":"84d294ab-bfe4-4ec2-ba8a-6a5540d570b8","Type":"ContainerStarted","Data":"fc2dc4d5d72c50593f78be825bae0b5bd4c32d58f8884417f5a787d2733387c2"} Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.916886 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7xwcd" event={"ID":"bfd6bf8b-a642-4717-a3e5-5e972e49e665","Type":"ContainerStarted","Data":"6b935f5d2659b3e0ec412e0ad2433f59e8ba61daf1e7686341b56371117fa454"} Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.924757 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rt756"] Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.925599 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wm6wm" event={"ID":"7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9","Type":"ContainerStarted","Data":"7e9798c31a8a4450f9731e7a6f1ed020717b3f2df2f43c5ed8a1e734aec86e7e"} Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.928686 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-8vtdg"] Dec 05 17:35:37 crc 
kubenswrapper[4961]: I1205 17:35:37.942435 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-sp65b" event={"ID":"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee","Type":"ContainerStarted","Data":"01177853a5e0acb043304353fbac8e83c80c7bb7d40d7a7190ceb58e3214faac"} Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.950145 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jj8px" event={"ID":"d9ae9f2f-9154-404d-9dfa-40cc81cb1e8d","Type":"ContainerStarted","Data":"7c5d5a55d708636cbfb41db33d059b44162e0d39a4b054c88f57c840f2b8493c"} Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.984981 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:37 crc kubenswrapper[4961]: E1205 17:35:37.986695 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:38.48667659 +0000 UTC m=+144.547827063 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:37 crc kubenswrapper[4961]: I1205 17:35:37.988856 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-6pfr2" event={"ID":"65337aa1-ef5b-4daa-8e0e-33e8ff67f85b","Type":"ContainerStarted","Data":"db11ed994375431f6a2aeb20bc1e9b51e4d7741ca7b4ab3a79447b94d08cc9ae"} Dec 05 17:35:38 crc kubenswrapper[4961]: I1205 17:35:38.020848 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4z6bw"] Dec 05 17:35:38 crc kubenswrapper[4961]: I1205 17:35:38.072916 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-znxrq" Dec 05 17:35:38 crc kubenswrapper[4961]: I1205 17:35:38.089001 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:38 crc kubenswrapper[4961]: E1205 17:35:38.089202 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:38.589156802 +0000 UTC m=+144.650307285 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:38 crc kubenswrapper[4961]: I1205 17:35:38.089765 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:38 crc kubenswrapper[4961]: E1205 17:35:38.090154 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:38.590146346 +0000 UTC m=+144.651296819 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:38 crc kubenswrapper[4961]: I1205 17:35:38.190947 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:38 crc kubenswrapper[4961]: E1205 17:35:38.191151 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:38.691128733 +0000 UTC m=+144.752279206 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:38 crc kubenswrapper[4961]: I1205 17:35:38.191221 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:38 crc kubenswrapper[4961]: E1205 17:35:38.191663 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:38.691646235 +0000 UTC m=+144.752796708 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:38 crc kubenswrapper[4961]: I1205 17:35:38.203819 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-hpvmn"] Dec 05 17:35:38 crc kubenswrapper[4961]: I1205 17:35:38.254487 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-2msfx"] Dec 05 17:35:38 crc kubenswrapper[4961]: I1205 17:35:38.264644 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-74j6d"] Dec 05 17:35:38 crc kubenswrapper[4961]: I1205 17:35:38.273192 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415930-s2vbd"] Dec 05 17:35:38 crc kubenswrapper[4961]: I1205 17:35:38.292150 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:38 crc kubenswrapper[4961]: E1205 17:35:38.292342 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:38.792313224 +0000 UTC m=+144.853463697 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:38 crc kubenswrapper[4961]: I1205 17:35:38.292733 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:38 crc kubenswrapper[4961]: E1205 17:35:38.293203 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:38.793180755 +0000 UTC m=+144.854331228 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:38 crc kubenswrapper[4961]: I1205 17:35:38.312035 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-vg677"] Dec 05 17:35:38 crc kubenswrapper[4961]: I1205 17:35:38.318101 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-xlpsm"] Dec 05 17:35:38 crc kubenswrapper[4961]: I1205 17:35:38.320298 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-mzkwv"] Dec 05 17:35:38 crc kubenswrapper[4961]: I1205 17:35:38.393483 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:38 crc kubenswrapper[4961]: E1205 17:35:38.393861 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:38.893841774 +0000 UTC m=+144.954992257 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:38 crc kubenswrapper[4961]: W1205 17:35:38.467642 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2af64acf_53d5_4288_b53f_e889631a7ee4.slice/crio-89df1b99bd3f3e5426ae0723236fc8cb32482bf8b71c20eef7f1c10b3a4a7048 WatchSource:0}: Error finding container 89df1b99bd3f3e5426ae0723236fc8cb32482bf8b71c20eef7f1c10b3a4a7048: Status 404 returned error can't find the container with id 89df1b99bd3f3e5426ae0723236fc8cb32482bf8b71c20eef7f1c10b3a4a7048 Dec 05 17:35:38 crc kubenswrapper[4961]: I1205 17:35:38.495585 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:38 crc kubenswrapper[4961]: E1205 17:35:38.498263 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:38.998243972 +0000 UTC m=+145.059394445 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:38 crc kubenswrapper[4961]: I1205 17:35:38.596498 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:38 crc kubenswrapper[4961]: E1205 17:35:38.597278 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:39.097255122 +0000 UTC m=+145.158405595 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:38 crc kubenswrapper[4961]: I1205 17:35:38.698727 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:38 crc kubenswrapper[4961]: E1205 17:35:38.699174 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:39.199160451 +0000 UTC m=+145.260310924 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:38 crc kubenswrapper[4961]: I1205 17:35:38.799768 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:38 crc kubenswrapper[4961]: E1205 17:35:38.800346 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:39.300321352 +0000 UTC m=+145.361471935 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:38 crc kubenswrapper[4961]: I1205 17:35:38.908100 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:38 crc kubenswrapper[4961]: E1205 17:35:38.908474 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:39.408457039 +0000 UTC m=+145.469607512 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.007003 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.009323 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:39 crc kubenswrapper[4961]: E1205 17:35:39.009767 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:39.509745993 +0000 UTC m=+145.570896466 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.022520 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-s2vbd" event={"ID":"45c41860-47e0-4fc6-b9e2-73308ab35bfe","Type":"ContainerStarted","Data":"be1484d44fb83eb6598038e5ffe1529c4bc75d386138bc34d3b5d2f8142e5688"} Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.029248 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-2msfx" event={"ID":"694a4dea-466c-48de-b828-dded0b4e3309","Type":"ContainerStarted","Data":"cee5bb3b71783ec8c538c4f6dd2a01f35b0550154f332382ac86d9ae6592d980"} Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.038854 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4z6bw" event={"ID":"faedd27a-ff89-4838-a73a-51e1c194f2e9","Type":"ContainerStarted","Data":"5d26643260d377502a3b4f1539b06986c9b0028558a2555d65e0a0ede05fa72c"} Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.044058 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-hlv6r" event={"ID":"9f02aa74-e797-4fe5-8412-0a64e64730c1","Type":"ContainerStarted","Data":"cae4c57df39a56028b46c851d611470b7db1496c2a9ec7efc14167aede3133c2"} Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.052904 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-vg677" event={"ID":"af42c194-0d59-4fbb-ab5c-777172b46fd8","Type":"ContainerStarted","Data":"a0aab366e7dee9df3a71c3133d0decc7735584fd21f50ad20f48ca870b2ffd14"} Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.065231 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hpvmn" event={"ID":"114cc896-bf47-4c02-b478-3bb3b3aaa729","Type":"ContainerStarted","Data":"a4ce6f0bf5b0f683c873c97ef82e5a63cccabbdb73ebd850a21345650d9135e1"} Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.072340 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-nksfl" event={"ID":"84d294ab-bfe4-4ec2-ba8a-6a5540d570b8","Type":"ContainerStarted","Data":"65de1389a0bc51b92bae38a8c03f2dc7b200a1d5501de57c942fbb55a1626d55"} Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.076416 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qtkvq" event={"ID":"e42cbe9a-2cfd-4cf7-b43f-6ee4458995a5","Type":"ContainerStarted","Data":"4b8dba20531b9b33f9b43980241c44e73bf4d74691a9479923a9298fe0e915a0"} Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.082466 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jj8px" event={"ID":"d9ae9f2f-9154-404d-9dfa-40cc81cb1e8d","Type":"ContainerStarted","Data":"d56e01a917cec3d3ce75b582b86d7bb41d38225341d76c5eb4153a043089159b"} Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.082579 4961 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jj8px" Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.085287 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p6lkk" event={"ID":"5dcdb8e2-ea42-41ca-815d-058d299f96bd","Type":"ContainerStarted","Data":"7913ef21c920fdca30bd757963e054688cb9b4e8a8ae5562353f92be54408010"} Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.094694 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rt756" event={"ID":"47ebd001-fdad-4dcf-a6ed-6e32320cca9b","Type":"ContainerStarted","Data":"4766f659128f4daaeddb43ab8646a06add035ec4295cda2df72de6da933fe3db"} Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.101061 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" event={"ID":"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2","Type":"ContainerStarted","Data":"3143898296cc46b00dca2f9a8d5142231ef3698b76cd4c1e602c616f695b6311"} Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.103758 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-xlpsm" event={"ID":"2af64acf-53d5-4288-b53f-e889631a7ee4","Type":"ContainerStarted","Data":"89df1b99bd3f3e5426ae0723236fc8cb32482bf8b71c20eef7f1c10b3a4a7048"} Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.107742 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-74j6d" event={"ID":"6c7ede11-6cf9-43d8-8165-7d5ce10f9adc","Type":"ContainerStarted","Data":"6a38c0b31ba32a305cd546972bbf236df7591b789bb207ae16b50da9e5745b3e"} Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.109026 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-mzkwv" event={"ID":"df860690-ba32-4a90-ab42-80ae129b935a","Type":"ContainerStarted","Data":"995721cec8efdb829412aa553ad8ad7020f03d3cbf4470e1edc877eb49ab6a2a"} Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.110621 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:39 crc kubenswrapper[4961]: E1205 17:35:39.111302 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:39.611283823 +0000 UTC m=+145.672434386 (durationBeforeRetry 500ms). 
Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.113136 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-f6zsw" event={"ID":"ce0eed91-28af-4966-b71a-3dda1df5d6cb","Type":"ContainerStarted","Data":"a19495e49408c61a4e0015dd3837f2bb89ede92f75b8ec1f2684efc22f4dd803"}
Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.215610 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 17:35:39 crc kubenswrapper[4961]: E1205 17:35:39.218767 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:39.718739964 +0000 UTC m=+145.779890447 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.279433 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jj8px"
Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.317665 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:39 crc kubenswrapper[4961]: E1205 17:35:39.318148 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:39.818131963 +0000 UTC m=+145.879282436 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.319229 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jqd6w"]
Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.337307 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-6px8t"]
Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.343533 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nx5xr"]
Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.347548 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vpcxg"]
Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.387131 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-cc7mk"]
Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.393671 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6fzkv"]
Dec 05 17:35:39 crc kubenswrapper[4961]: W1205 17:35:39.394987 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod75749479_6797_4377_8ce6_5c329f7e7a78.slice/crio-a962bdf166e2c98eef95616c8e70612ef9d13ce07c678b390dabc1f7fac54d92 WatchSource:0}: Error finding container a962bdf166e2c98eef95616c8e70612ef9d13ce07c678b390dabc1f7fac54d92: Status 404 returned error can't find the container with id a962bdf166e2c98eef95616c8e70612ef9d13ce07c678b390dabc1f7fac54d92
Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.400694 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s4xcd"]
Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.400955 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-nksfl"
Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.402175 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-zswqb"]
Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.405892 4961 patch_prober.go:28] interesting pod/router-default-5444994796-nksfl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 17:35:39 crc kubenswrapper[4961]: [-]has-synced failed: reason withheld
Dec 05 17:35:39 crc kubenswrapper[4961]: [+]process-running ok
Dec 05 17:35:39 crc kubenswrapper[4961]: healthz check failed
Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.405950 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nksfl" podUID="84d294ab-bfe4-4ec2-ba8a-6a5540d570b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
output="HTTP probe failed with statuscode: 500" Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.419454 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:39 crc kubenswrapper[4961]: E1205 17:35:39.419917 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:39.919901468 +0000 UTC m=+145.981051941 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.520925 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-sp65b" podStartSLOduration=123.520902525 podStartE2EDuration="2m3.520902525s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:39.481285845 +0000 UTC m=+145.542436318" watchObservedRunningTime="2025-12-05 17:35:39.520902525 +0000 UTC m=+145.582052998" Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.521289 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:39 crc kubenswrapper[4961]: E1205 17:35:39.521738 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:40.021724155 +0000 UTC m=+146.082874638 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.523051 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-jj8px" podStartSLOduration=123.523039766 podStartE2EDuration="2m3.523039766s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:39.52023628 +0000 UTC m=+145.581386753" watchObservedRunningTime="2025-12-05 17:35:39.523039766 +0000 UTC m=+145.584190239" Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.525120 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-f9gdg"] Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.548684 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-hg4b6"] Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.560084 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-6pfr2" podStartSLOduration=123.560060325 podStartE2EDuration="2m3.560060325s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:39.551495471 +0000 UTC m=+145.612645944" watchObservedRunningTime="2025-12-05 17:35:39.560060325 +0000 UTC m=+145.621210798" Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.561404 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5x2t5"] Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.567510 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-cxwzt"] Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.595471 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-bghjx"] Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.607472 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-nksfl" podStartSLOduration=123.60745684 podStartE2EDuration="2m3.60745684s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:39.592156367 +0000 UTC m=+145.653306850" watchObservedRunningTime="2025-12-05 17:35:39.60745684 +0000 UTC m=+145.668607313" Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.608637 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-c4krp"] Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.618922 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5hmrk"] Dec 05 
Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.622527 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 17:35:39 crc kubenswrapper[4961]: E1205 17:35:39.622945 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:40.122927737 +0000 UTC m=+146.184078210 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.623737 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-5ks25"]
Dec 05 17:35:39 crc kubenswrapper[4961]: W1205 17:35:39.626293 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37e0f3d5_9ee5_4aac_aaef_205d8466b84a.slice/crio-a92abc0f4149f6774886a56d17566d7176fa9376fcc8970bcbcc1953effde579 WatchSource:0}: Error finding container a92abc0f4149f6774886a56d17566d7176fa9376fcc8970bcbcc1953effde579: Status 404 returned error can't find the container with id a92abc0f4149f6774886a56d17566d7176fa9376fcc8970bcbcc1953effde579
Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.627789 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj"]
Dec 05 17:35:39 crc kubenswrapper[4961]: W1205 17:35:39.629077 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3733b15a_cd45_418e_9452_79a33535ce35.slice/crio-a7498208f6c94fcd820c326d3dd1855869fffbb467db6305014867224a006c58 WatchSource:0}: Error finding container a7498208f6c94fcd820c326d3dd1855869fffbb467db6305014867224a006c58: Status 404 returned error can't find the container with id a7498208f6c94fcd820c326d3dd1855869fffbb467db6305014867224a006c58
Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.630306 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-mwbff"]
Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.636550 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q89t5"]
Dec 05 17:35:39 crc kubenswrapper[4961]: W1205 17:35:39.643513 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf9f9b3cf_6636_42ce_a11f_d85b976c33a5.slice/crio-e716daafbd4c67384393795ea9e5257ea014d67bfaa9630aadc535a7bbdc7c5b WatchSource:0}: Error finding container e716daafbd4c67384393795ea9e5257ea014d67bfaa9630aadc535a7bbdc7c5b: Status 404 returned error can't find the container with id e716daafbd4c67384393795ea9e5257ea014d67bfaa9630aadc535a7bbdc7c5b
Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.646944 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-znxrq"]
Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.727262 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:39 crc kubenswrapper[4961]: E1205 17:35:39.727601 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:40.227589141 +0000 UTC m=+146.288739614 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.748040 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp"]
Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.828495 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 17:35:39 crc kubenswrapper[4961]: E1205 17:35:39.829092 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:40.329073421 +0000 UTC m=+146.390223894 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:35:39 crc kubenswrapper[4961]: W1205 17:35:39.905534 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod892bdd9c_95f3_450b_9b7b_917e481f0d6f.slice/crio-a32a0a56dbd4a082ad12e06a1026aee414683c9e6dc20cf1d0fd7670fd2d15ac WatchSource:0}: Error finding container a32a0a56dbd4a082ad12e06a1026aee414683c9e6dc20cf1d0fd7670fd2d15ac: Status 404 returned error can't find the container with id a32a0a56dbd4a082ad12e06a1026aee414683c9e6dc20cf1d0fd7670fd2d15ac
Dec 05 17:35:39 crc kubenswrapper[4961]: I1205 17:35:39.930449 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:39 crc kubenswrapper[4961]: E1205 17:35:39.931003 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:40.430986419 +0000 UTC m=+146.492136892 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.032058 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 17:35:40 crc kubenswrapper[4961]: E1205 17:35:40.032653 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:40.532628002 +0000 UTC m=+146.593778485 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.033391 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:40 crc kubenswrapper[4961]: E1205 17:35:40.033736 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:40.533725748 +0000 UTC m=+146.594876221 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.134231 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 17:35:40 crc kubenswrapper[4961]: E1205 17:35:40.134654 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:40.634637093 +0000 UTC m=+146.695787566 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.177576 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4z6bw" event={"ID":"faedd27a-ff89-4838-a73a-51e1c194f2e9","Type":"ContainerStarted","Data":"aa1a7ea8a3e89eeba2d857c156f0f59476554b0f9082a9173d337ca4fee1acd0"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.179033 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4z6bw"
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.197324 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-74j6d" event={"ID":"6c7ede11-6cf9-43d8-8165-7d5ce10f9adc","Type":"ContainerStarted","Data":"f8901b111af81941cd4362699d8308d83f7ac07906255f7da0ef9c07b212255e"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.198594 4961 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-4z6bw container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.25:8443/healthz\": dial tcp 10.217.0.25:8443: connect: connection refused" start-of-body=
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.198633 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4z6bw" podUID="faedd27a-ff89-4838-a73a-51e1c194f2e9" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.25:8443/healthz\": dial tcp 10.217.0.25:8443: connect: connection refused"
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.228168 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4z6bw" podStartSLOduration=124.228141913 podStartE2EDuration="2m4.228141913s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:40.215475743 +0000 UTC m=+146.276626216" watchObservedRunningTime="2025-12-05 17:35:40.228141913 +0000 UTC m=+146.289292396"
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.235653 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:40 crc kubenswrapper[4961]: E1205 17:35:40.236810 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:40.736797219 +0000 UTC m=+146.797947692 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.263923 4961 generic.go:334] "Generic (PLEG): container finished" podID="1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2" containerID="a7e140d15252d9e8c84b1f166aa1db90b73192caba5125b171f4882cc1e5b8b8" exitCode=0
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.263990 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" event={"ID":"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2","Type":"ContainerDied","Data":"a7e140d15252d9e8c84b1f166aa1db90b73192caba5125b171f4882cc1e5b8b8"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.338872 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 17:35:40 crc kubenswrapper[4961]: E1205 17:35:40.339708 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:40.839691221 +0000 UTC m=+146.900841694 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.345450 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-74j6d" podStartSLOduration=124.345433897 podStartE2EDuration="2m4.345433897s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:40.24868226 +0000 UTC m=+146.309832733" watchObservedRunningTime="2025-12-05 17:35:40.345433897 +0000 UTC m=+146.406584370"
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.347162 4961 generic.go:334] "Generic (PLEG): container finished" podID="e42cbe9a-2cfd-4cf7-b43f-6ee4458995a5" containerID="f4a95f8e6fd12bd77fdba72b561394f7aa00f0d2262cad4f89fa6596ec922568" exitCode=0
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.347264 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qtkvq" event={"ID":"e42cbe9a-2cfd-4cf7-b43f-6ee4458995a5","Type":"ContainerDied","Data":"f4a95f8e6fd12bd77fdba72b561394f7aa00f0d2262cad4f89fa6596ec922568"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.412797 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-s2vbd" event={"ID":"45c41860-47e0-4fc6-b9e2-73308ab35bfe","Type":"ContainerStarted","Data":"cf9784d6224eedf65690b56c4a042d26ed4e7e57ec423e5cf090da1749306ba3"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.424962 4961 patch_prober.go:28] interesting pod/router-default-5444994796-nksfl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Dec 05 17:35:40 crc kubenswrapper[4961]: [-]has-synced failed: reason withheld
Dec 05 17:35:40 crc kubenswrapper[4961]: [+]process-running ok
Dec 05 17:35:40 crc kubenswrapper[4961]: healthz check failed
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.425034 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nksfl" podUID="84d294ab-bfe4-4ec2-ba8a-6a5540d570b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.444333 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:40 crc kubenswrapper[4961]: E1205 17:35:40.445595 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:40.945577974 +0000 UTC m=+147.006728447 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.464470 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-hg4b6" event={"ID":"37e0f3d5-9ee5-4aac-aaef-205d8466b84a","Type":"ContainerStarted","Data":"a92abc0f4149f6774886a56d17566d7176fa9376fcc8970bcbcc1953effde579"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.504105 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-2msfx" event={"ID":"694a4dea-466c-48de-b828-dded0b4e3309","Type":"ContainerStarted","Data":"5c3d9317c8d497d6c64d27d4715889bd0be1d0b28b899b6080c59d7812916be4"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.505588 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-2msfx"
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.517208 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" event={"ID":"e44fec59-fa03-4dd6-be86-108902060c91","Type":"ContainerStarted","Data":"20e8f007c13d018caeaeb7ed4924a59c4edf63454c948a2b5aefde56118a9a44"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.519157 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p6lkk" event={"ID":"5dcdb8e2-ea42-41ca-815d-058d299f96bd","Type":"ContainerStarted","Data":"e1348d8ba89f528142d4250da105d954092833c4e4828d89f56d839307206dca"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.521655 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-xlpsm" event={"ID":"2af64acf-53d5-4288-b53f-e889631a7ee4","Type":"ContainerStarted","Data":"ea3fd3ce385c6190b1359f37e19af29523a909c42b1b778101526291fec79675"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.522967 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hpvmn" event={"ID":"114cc896-bf47-4c02-b478-3bb3b3aaa729","Type":"ContainerStarted","Data":"61d44abd37ef9c52bf888076882b8fc43e5d39c0e122e612ea44229d4573beed"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.523092 4961 patch_prober.go:28] interesting pod/downloads-7954f5f757-2msfx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body=
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.523122 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2msfx" podUID="694a4dea-466c-48de-b828-dded0b4e3309" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused"
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.523908 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ks25" event={"ID":"2aa4ad5f-8dcb-4761-a31b-66a26fbc3be3","Type":"ContainerStarted","Data":"77542774b61eab557da049efa09605bc2b52d405002e0d0fba87263fe90f8293"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.530842 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mwbff" event={"ID":"3d4d981b-d365-4cc1-ac62-0a4237be8bdf","Type":"ContainerStarted","Data":"f0dcea5c0469c3533bd87618af795fa3a49c7f0a1055cafaf9a2909da9221d4b"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.535544 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-hg4b6" podStartSLOduration=7.535527639 podStartE2EDuration="7.535527639s" podCreationTimestamp="2025-12-05 17:35:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:40.534352231 +0000 UTC m=+146.595502704" watchObservedRunningTime="2025-12-05 17:35:40.535527639 +0000 UTC m=+146.596678102"
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.536225 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-s2vbd" podStartSLOduration=124.536218446 podStartE2EDuration="2m4.536218446s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:40.487332345 +0000 UTC m=+146.548482828" watchObservedRunningTime="2025-12-05 17:35:40.536218446 +0000 UTC m=+146.597368919"
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.555009 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 17:35:40 crc kubenswrapper[4961]: E1205 17:35:40.556854 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:41.056832405 +0000 UTC m=+147.117982888 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.565046 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q89t5" event={"ID":"c203f16b-553f-4282-92bb-6b7af8dfc31d","Type":"ContainerStarted","Data":"efc3c7ae8e84b85784087a7c907019134974f7b2e31257fb80de71f8d1158592"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.574690 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-p6lkk" podStartSLOduration=124.574673058 podStartE2EDuration="2m4.574673058s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:40.574246298 +0000 UTC m=+146.635396781" watchObservedRunningTime="2025-12-05 17:35:40.574673058 +0000 UTC m=+146.635823531"
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.590734 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-znxrq" event={"ID":"02fabe40-bb13-4cec-af0f-0c08d46f511d","Type":"ContainerStarted","Data":"fa85c3c1c6ec23f5bbfd583f839625bff6b4a5278fccb723fdd6ab10c3a6f730"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.597497 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6fzkv" event={"ID":"e0c0d8d7-7517-40e5-a9d4-d1c5160d64b7","Type":"ContainerStarted","Data":"3a743f2fd79ea0e7d09546f990ad93294f52b3c5772984359893499581902ce1"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.599604 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vpcxg" event={"ID":"4a0cedd3-c46f-462a-8cd9-54629e7cb576","Type":"ContainerStarted","Data":"7625f71b37271a35a38a1bfe1746dc5c3eaea2488ff9e864f0428d7ffa566550"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.608436 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-hlv6r" event={"ID":"9f02aa74-e797-4fe5-8412-0a64e64730c1","Type":"ContainerStarted","Data":"48f147937f2c83e5ed584776c4b0ebd526b4f0a6e842a92ce49bef5f0f40adf4"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.608828 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-hlv6r"
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.632031 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-2msfx" podStartSLOduration=124.632012219 podStartE2EDuration="2m4.632012219s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:40.630214337 +0000 UTC m=+146.691364810" watchObservedRunningTime="2025-12-05 17:35:40.632012219 +0000 UTC m=+146.693162692"
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.633347 4961 patch_prober.go:28] interesting pod/console-operator-58897d9998-hlv6r container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.29:8443/readyz\": dial tcp 10.217.0.29:8443: connect: connection refused" start-of-body=
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.633513 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-hlv6r" podUID="9f02aa74-e797-4fe5-8412-0a64e64730c1" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.29:8443/readyz\": dial tcp 10.217.0.29:8443: connect: connection refused"
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.671504 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.678471 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zswqb" event={"ID":"17b393b4-5451-4ef1-9486-2b1b6a70a1c3","Type":"ContainerStarted","Data":"96cefbfd3d805619e96eaaf623d4df6e9ced877d4ae00eb07a47ff971efd1422"}
Dec 05 17:35:40 crc kubenswrapper[4961]: E1205 17:35:40.697419 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:41.19733943 +0000 UTC m=+147.258489903 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.707418 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q89t5" podStartSLOduration=124.707393339 podStartE2EDuration="2m4.707393339s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:40.669583051 +0000 UTC m=+146.730733534" watchObservedRunningTime="2025-12-05 17:35:40.707393339 +0000 UTC m=+146.768543812"
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.719142 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-bghjx" event={"ID":"f9f9b3cf-6636-42ce-a11f-d85b976c33a5","Type":"ContainerStarted","Data":"e716daafbd4c67384393795ea9e5257ea014d67bfaa9630aadc535a7bbdc7c5b"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.779100 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-hlv6r" podStartSLOduration=124.77907529 podStartE2EDuration="2m4.77907529s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:40.706052627 +0000 UTC m=+146.767203110" watchObservedRunningTime="2025-12-05 17:35:40.77907529 +0000 UTC m=+146.840225763"
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.780267 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 17:35:40 crc kubenswrapper[4961]: E1205 17:35:40.780354 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:41.28033245 +0000 UTC m=+147.341482923 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.781166 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:40 crc kubenswrapper[4961]: E1205 17:35:40.781478 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:41.281461197 +0000 UTC m=+147.342611670 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.807887 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" event={"ID":"f9ea79ab-de4c-4165-82d4-84b9d73df5a4","Type":"ContainerStarted","Data":"86c1713abf615befc67e6621669df2ed90a8f686f756967a28ddd16f8a849e71"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.809674 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj"
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.812458 4961 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-hvmkj container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body=
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.812510 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" podUID="f9ea79ab-de4c-4165-82d4-84b9d73df5a4" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused"
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.842229 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-vg677" event={"ID":"af42c194-0d59-4fbb-ab5c-777172b46fd8","Type":"ContainerStarted","Data":"c045dcf8f26d18f99587fbd7f64384de6b19d3aec793f2bae00560764c9032fd"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.862130 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-bghjx" podStartSLOduration=124.86210361 podStartE2EDuration="2m4.86210361s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:40.778370254 +0000 UTC m=+146.839520737" watchObservedRunningTime="2025-12-05 17:35:40.86210361 +0000 UTC m=+146.923254083"
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.862378 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" podStartSLOduration=124.862374377 podStartE2EDuration="2m4.862374377s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:40.856674131 +0000 UTC m=+146.917824624" watchObservedRunningTime="2025-12-05 17:35:40.862374377 +0000 UTC m=+146.923524850"
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.897622 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-6px8t" event={"ID":"475e25de-63ce-4cae-8fc6-4c057d616247","Type":"ContainerStarted","Data":"7a29bbf15cfef12970d9d4ca96af39ea82c3ca77901659b73a8babd10e1ea479"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.897666 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-6px8t" event={"ID":"475e25de-63ce-4cae-8fc6-4c057d616247","Type":"ContainerStarted","Data":"5891352b757c9e537db50eeea1ee084e4d0588dd9c46d809e0c18db6377cd8b1"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.898044 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 17:35:40 crc kubenswrapper[4961]: E1205 17:35:40.899271 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:41.399250132 +0000 UTC m=+147.460400605 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.914914 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7xwcd" event={"ID":"bfd6bf8b-a642-4717-a3e5-5e972e49e665","Type":"ContainerStarted","Data":"d70ad80ec090d47d16c28b1c34ab68f9a699ddd7172e98051cf4e72fc10d66c7"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.977374 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-c4krp" event={"ID":"3733b15a-cd45-418e-9452-79a33535ce35","Type":"ContainerStarted","Data":"a7498208f6c94fcd820c326d3dd1855869fffbb467db6305014867224a006c58"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.983514 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s4xcd" event={"ID":"1c4ea533-79da-4be4-a70c-6de0d3b6c120","Type":"ContainerStarted","Data":"4421bc8da3f7257027c482bcc1e59c752f98c3d643fd0f2cbdd0c271db595b43"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.983563 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s4xcd" event={"ID":"1c4ea533-79da-4be4-a70c-6de0d3b6c120","Type":"ContainerStarted","Data":"68140245a5dcbe87d9187bfb3e0b08d7d7e3f426bd08a50be43972487bd2bb7f"}
Dec 05 17:35:40 crc kubenswrapper[4961]: I1205 17:35:40.996430 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wm6wm" event={"ID":"7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9","Type":"ContainerStarted","Data":"db0ba3abb9d7326c5716653b41c7d506dec06b7ddb8f87adb1c892a5ff83bf7f"}
Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.005965 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-mzkwv" event={"ID":"df860690-ba32-4a90-ab42-80ae129b935a","Type":"ContainerStarted","Data":"6f0d8f6349f18a1b27223a047000ed647b95f3035f33a7acb48860dc0b266526"}
Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.006968 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:41 crc kubenswrapper[4961]: E1205 17:35:41.007360 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:41.507348018 +0000 UTC m=+147.568498491 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.039375 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-s4xcd" podStartSLOduration=125.039351308 podStartE2EDuration="2m5.039351308s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:41.029521715 +0000 UTC m=+147.090672208" watchObservedRunningTime="2025-12-05 17:35:41.039351308 +0000 UTC m=+147.100501781"
Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.107712 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Dec 05 17:35:41 crc kubenswrapper[4961]: E1205 17:35:41.109019 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:41.608996851 +0000 UTC m=+147.670147334 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.109174 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:35:41 crc kubenswrapper[4961]: E1205 17:35:41.109506 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:41.609496673 +0000 UTC m=+147.670647146 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.125022 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5x2t5" event={"ID":"901195d1-b2c7-4ddb-831c-7f5efca5c758","Type":"ContainerStarted","Data":"133b3e88a21787c0af9ecec004ae17d0c8d70dfa4060b255f1516429be1efe08"}
Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.140072 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nx5xr" event={"ID":"d1f2c38b-9894-4145-bb3f-a38bfaf9e16e","Type":"ContainerStarted","Data":"560ded6d1befeb21cea345b6f171ba901c9731dc51d83d0a37a77176d40f81c4"}
Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.140129 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nx5xr" event={"ID":"d1f2c38b-9894-4145-bb3f-a38bfaf9e16e","Type":"ContainerStarted","Data":"37dbb5fce07de42e06871fe92b49c13fc7f77f8197a477148b43275d95e13cc7"}
Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.154687 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" event={"ID":"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6","Type":"ContainerStarted","Data":"97ba11d768b63cc0a8d45cbf504687cdbd76cad95f205c652c74066255f0b63d"}
Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.155463 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg"
Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.156499 4961 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-f9gdg container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.9:6443/healthz\": dial tcp 10.217.0.9:6443: connect: connection refused" start-of-body=
Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.156543 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" podUID="dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.9:6443/healthz\": dial tcp 10.217.0.9:6443: connect: connection refused"
Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.177229 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jqd6w" event={"ID":"75749479-6797-4377-8ce6-5c329f7e7a78","Type":"ContainerStarted","Data":"a962bdf166e2c98eef95616c8e70612ef9d13ce07c678b390dabc1f7fac54d92"}
Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.185237 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5x2t5" podStartSLOduration=125.18521128 podStartE2EDuration="2m5.18521128s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC"
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:41.178401788 +0000 UTC m=+147.239552281" watchObservedRunningTime="2025-12-05 17:35:41.18521128 +0000 UTC m=+147.246361753" Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.205665 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rt756" event={"ID":"47ebd001-fdad-4dcf-a6ed-6e32320cca9b","Type":"ContainerStarted","Data":"57127d9809b8ab53586f55320766b50ebbdb9052998dc8a85eb8da20e6012800"} Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.215756 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:41 crc kubenswrapper[4961]: E1205 17:35:41.216837 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:41.716759619 +0000 UTC m=+147.777910102 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.249109 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" event={"ID":"892bdd9c-95f3-450b-9b7b-917e481f0d6f","Type":"ContainerStarted","Data":"a32a0a56dbd4a082ad12e06a1026aee414683c9e6dc20cf1d0fd7670fd2d15ac"} Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.286917 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" podStartSLOduration=125.286898904 podStartE2EDuration="2m5.286898904s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:41.241192348 +0000 UTC m=+147.302342821" watchObservedRunningTime="2025-12-05 17:35:41.286898904 +0000 UTC m=+147.348049387" Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.288253 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jqd6w" podStartSLOduration=125.288245596 podStartE2EDuration="2m5.288245596s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:41.28591778 +0000 UTC m=+147.347068273" watchObservedRunningTime="2025-12-05 17:35:41.288245596 +0000 UTC m=+147.349396069" Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.344706 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: 
\"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:41 crc kubenswrapper[4961]: E1205 17:35:41.346205 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:41.846190061 +0000 UTC m=+147.907340534 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.365176 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" event={"ID":"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb","Type":"ContainerStarted","Data":"731d3a956876b7765d75b7f9328e4833245a4f186e07e83856ba148c6148369e"} Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.365228 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" event={"ID":"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb","Type":"ContainerStarted","Data":"45f9678143c63a7a9d069817e4725d8d15bd7e4e8aefd2842d376edb801836ec"} Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.379524 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.394362 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-nx5xr" podStartSLOduration=125.394340244 podStartE2EDuration="2m5.394340244s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:41.334577825 +0000 UTC m=+147.395728298" watchObservedRunningTime="2025-12-05 17:35:41.394340244 +0000 UTC m=+147.455490727" Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.398084 4961 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-cc7mk container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" start-of-body= Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.398155 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" podUID="20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.7:8443/healthz\": dial tcp 10.217.0.7:8443: connect: connection refused" Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.409998 4961 patch_prober.go:28] interesting pod/router-default-5444994796-nksfl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with 
statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:35:41 crc kubenswrapper[4961]: [-]has-synced failed: reason withheld Dec 05 17:35:41 crc kubenswrapper[4961]: [+]process-running ok Dec 05 17:35:41 crc kubenswrapper[4961]: healthz check failed Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.410062 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nksfl" podUID="84d294ab-bfe4-4ec2-ba8a-6a5540d570b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.411998 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-f6zsw" event={"ID":"ce0eed91-28af-4966-b71a-3dda1df5d6cb","Type":"ContainerStarted","Data":"d0c1327d97bac80cb0efc3a3f5e932a01f40b0caeecbdac32e86ccbe340777f3"} Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.451568 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:41 crc kubenswrapper[4961]: E1205 17:35:41.452163 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:41.952143836 +0000 UTC m=+148.013294309 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.454662 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" podStartSLOduration=125.454626965 podStartE2EDuration="2m5.454626965s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:41.451661334 +0000 UTC m=+147.512811817" watchObservedRunningTime="2025-12-05 17:35:41.454626965 +0000 UTC m=+147.515777438" Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.454836 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-rt756" podStartSLOduration=125.45483034 podStartE2EDuration="2m5.45483034s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:41.402723642 +0000 UTC m=+147.463874115" watchObservedRunningTime="2025-12-05 17:35:41.45483034 +0000 UTC m=+147.515980823" Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.481438 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-machine-config-operator/machine-config-server-f6zsw" podStartSLOduration=8.481419301 podStartE2EDuration="8.481419301s" podCreationTimestamp="2025-12-05 17:35:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:41.480304824 +0000 UTC m=+147.541455297" watchObservedRunningTime="2025-12-05 17:35:41.481419301 +0000 UTC m=+147.542569774" Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.544350 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5hmrk" event={"ID":"601ba962-a964-472d-b481-4946fd7265b1","Type":"ContainerStarted","Data":"771a74f076b123babb6040627e8bf4f8badfa8328f4ffa9397ed0b962fba9fab"} Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.554696 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:41 crc kubenswrapper[4961]: E1205 17:35:41.557405 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:42.057388144 +0000 UTC m=+148.118538707 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.655713 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:41 crc kubenswrapper[4961]: E1205 17:35:41.656135 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:42.156112416 +0000 UTC m=+148.217262889 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.758623 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:41 crc kubenswrapper[4961]: E1205 17:35:41.759115 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:42.259100121 +0000 UTC m=+148.320250594 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.860128 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:41 crc kubenswrapper[4961]: E1205 17:35:41.860400 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:42.360385645 +0000 UTC m=+148.421536118 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:41 crc kubenswrapper[4961]: I1205 17:35:41.967432 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:41 crc kubenswrapper[4961]: E1205 17:35:41.971369 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:42.47134816 +0000 UTC m=+148.532498633 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.069481 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:42 crc kubenswrapper[4961]: E1205 17:35:42.069807 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:42.569764275 +0000 UTC m=+148.630914758 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.070157 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:42 crc kubenswrapper[4961]: E1205 17:35:42.070607 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:42.570597284 +0000 UTC m=+148.631747757 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.145680 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5hmrk" podStartSLOduration=126.145657826 podStartE2EDuration="2m6.145657826s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:41.583455063 +0000 UTC m=+147.644605546" watchObservedRunningTime="2025-12-05 17:35:42.145657826 +0000 UTC m=+148.206808299" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.149212 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-hdlff"] Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.154555 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-hdlff" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.169446 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.173023 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.173231 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af3d9725-8492-4a3a-b125-a545b2d4c8c7-catalog-content\") pod \"community-operators-hdlff\" (UID: \"af3d9725-8492-4a3a-b125-a545b2d4c8c7\") " pod="openshift-marketplace/community-operators-hdlff" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.173364 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-45prm\" (UniqueName: \"kubernetes.io/projected/af3d9725-8492-4a3a-b125-a545b2d4c8c7-kube-api-access-45prm\") pod \"community-operators-hdlff\" (UID: \"af3d9725-8492-4a3a-b125-a545b2d4c8c7\") " pod="openshift-marketplace/community-operators-hdlff" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.173429 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af3d9725-8492-4a3a-b125-a545b2d4c8c7-utilities\") pod \"community-operators-hdlff\" (UID: \"af3d9725-8492-4a3a-b125-a545b2d4c8c7\") " pod="openshift-marketplace/community-operators-hdlff" Dec 05 17:35:42 crc kubenswrapper[4961]: E1205 17:35:42.173594 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:42.673551468 +0000 UTC m=+148.734701951 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.314466 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af3d9725-8492-4a3a-b125-a545b2d4c8c7-catalog-content\") pod \"community-operators-hdlff\" (UID: \"af3d9725-8492-4a3a-b125-a545b2d4c8c7\") " pod="openshift-marketplace/community-operators-hdlff" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.314546 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-45prm\" (UniqueName: \"kubernetes.io/projected/af3d9725-8492-4a3a-b125-a545b2d4c8c7-kube-api-access-45prm\") pod \"community-operators-hdlff\" (UID: \"af3d9725-8492-4a3a-b125-a545b2d4c8c7\") " pod="openshift-marketplace/community-operators-hdlff" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.314586 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af3d9725-8492-4a3a-b125-a545b2d4c8c7-utilities\") pod \"community-operators-hdlff\" (UID: \"af3d9725-8492-4a3a-b125-a545b2d4c8c7\") " pod="openshift-marketplace/community-operators-hdlff" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.314620 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:42 crc kubenswrapper[4961]: E1205 17:35:42.314949 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:42.814936374 +0000 UTC m=+148.876086847 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.315414 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af3d9725-8492-4a3a-b125-a545b2d4c8c7-catalog-content\") pod \"community-operators-hdlff\" (UID: \"af3d9725-8492-4a3a-b125-a545b2d4c8c7\") " pod="openshift-marketplace/community-operators-hdlff" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.316190 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af3d9725-8492-4a3a-b125-a545b2d4c8c7-utilities\") pod \"community-operators-hdlff\" (UID: \"af3d9725-8492-4a3a-b125-a545b2d4c8c7\") " pod="openshift-marketplace/community-operators-hdlff" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.418253 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:42 crc kubenswrapper[4961]: E1205 17:35:42.418522 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:42.918504213 +0000 UTC m=+148.979654686 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.420410 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hdlff"] Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.436067 4961 patch_prober.go:28] interesting pod/router-default-5444994796-nksfl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:35:42 crc kubenswrapper[4961]: [-]has-synced failed: reason withheld Dec 05 17:35:42 crc kubenswrapper[4961]: [+]process-running ok Dec 05 17:35:42 crc kubenswrapper[4961]: healthz check failed Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.436139 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nksfl" podUID="84d294ab-bfe4-4ec2-ba8a-6a5540d570b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.456670 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-7wrmp"] Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.457835 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7wrmp" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.466769 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.490367 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-45prm\" (UniqueName: \"kubernetes.io/projected/af3d9725-8492-4a3a-b125-a545b2d4c8c7-kube-api-access-45prm\") pod \"community-operators-hdlff\" (UID: \"af3d9725-8492-4a3a-b125-a545b2d4c8c7\") " pod="openshift-marketplace/community-operators-hdlff" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.519964 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:42 crc kubenswrapper[4961]: E1205 17:35:42.521105 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:43.021088977 +0000 UTC m=+149.082239450 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.585892 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-jqd6w" event={"ID":"75749479-6797-4377-8ce6-5c329f7e7a78","Type":"ContainerStarted","Data":"5f3dab140c9cb17f92b7fb66c3aced6e5cf9db0767633e5354c557c387e0ac44"} Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.597311 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-kgvwm"] Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.604854 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kgvwm" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.606241 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ks25" event={"ID":"2aa4ad5f-8dcb-4761-a31b-66a26fbc3be3","Type":"ContainerStarted","Data":"8155cbe2f9ab7bd9dcf528f9ab55171d7147a50696493c699de3efd305e6406b"} Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.606279 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ks25" event={"ID":"2aa4ad5f-8dcb-4761-a31b-66a26fbc3be3","Type":"ContainerStarted","Data":"67e3fdc4b365576c70420fc428f127eae88e28eccbe18845ad140eed302f8bf7"} Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.622935 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hdlff" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.623035 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mwbff" event={"ID":"3d4d981b-d365-4cc1-ac62-0a4237be8bdf","Type":"ContainerStarted","Data":"71ea9e7e2d4a9f0645b19882fc5e605948e8d63da443f2e1c3b5c0aae6aaad86"} Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.623075 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mwbff" event={"ID":"3d4d981b-d365-4cc1-ac62-0a4237be8bdf","Type":"ContainerStarted","Data":"4dcbe8af7a448503b5bb468bf9b218b4d3ff457a3f10210f1def61b623fd4128"} Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.623762 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:42 crc kubenswrapper[4961]: E1205 17:35:42.624206 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:43.124191775 +0000 UTC m=+149.185342248 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.624236 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z7khz\" (UniqueName: \"kubernetes.io/projected/6ae1ad4f-caa6-49b9-9d32-6905088903bf-kube-api-access-z7khz\") pod \"community-operators-kgvwm\" (UID: \"6ae1ad4f-caa6-49b9-9d32-6905088903bf\") " pod="openshift-marketplace/community-operators-kgvwm" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.624260 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8x959\" (UniqueName: \"kubernetes.io/projected/1a8bb3a1-6049-4a0e-8d86-3ddd148686b9-kube-api-access-8x959\") pod \"certified-operators-7wrmp\" (UID: \"1a8bb3a1-6049-4a0e-8d86-3ddd148686b9\") " pod="openshift-marketplace/certified-operators-7wrmp" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.624315 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.624334 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a8bb3a1-6049-4a0e-8d86-3ddd148686b9-utilities\") pod \"certified-operators-7wrmp\" (UID: \"1a8bb3a1-6049-4a0e-8d86-3ddd148686b9\") " pod="openshift-marketplace/certified-operators-7wrmp" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.624360 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ae1ad4f-caa6-49b9-9d32-6905088903bf-utilities\") pod \"community-operators-kgvwm\" (UID: \"6ae1ad4f-caa6-49b9-9d32-6905088903bf\") " pod="openshift-marketplace/community-operators-kgvwm" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.624398 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a8bb3a1-6049-4a0e-8d86-3ddd148686b9-catalog-content\") pod \"certified-operators-7wrmp\" (UID: \"1a8bb3a1-6049-4a0e-8d86-3ddd148686b9\") " pod="openshift-marketplace/certified-operators-7wrmp" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.624470 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ae1ad4f-caa6-49b9-9d32-6905088903bf-catalog-content\") pod \"community-operators-kgvwm\" (UID: \"6ae1ad4f-caa6-49b9-9d32-6905088903bf\") " pod="openshift-marketplace/community-operators-kgvwm" Dec 05 17:35:42 crc kubenswrapper[4961]: E1205 17:35:42.625712 4961 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:43.125696931 +0000 UTC m=+149.186847404 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.671160 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6fzkv" event={"ID":"e0c0d8d7-7517-40e5-a9d4-d1c5160d64b7","Type":"ContainerStarted","Data":"5906af0815e26ddb4fb3fe2dcb5c6d9aa06277cf16b9a4e53007ced0607a2aa7"} Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.671207 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6fzkv" event={"ID":"e0c0d8d7-7517-40e5-a9d4-d1c5160d64b7","Type":"ContainerStarted","Data":"2a95a6312c754dee7324a2dbe4429d7a00955dbdaf02de13c247a8a522001049"} Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.671809 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6fzkv" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.693922 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qtkvq" event={"ID":"e42cbe9a-2cfd-4cf7-b43f-6ee4458995a5","Type":"ContainerStarted","Data":"9448facdfa3d2d467341f90919deb84f8648864d9593c5a6d948a53e368847fd"} Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.694027 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qtkvq" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.698414 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7wrmp"] Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.721492 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kgvwm"] Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.725989 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.726273 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a8bb3a1-6049-4a0e-8d86-3ddd148686b9-catalog-content\") pod \"certified-operators-7wrmp\" (UID: \"1a8bb3a1-6049-4a0e-8d86-3ddd148686b9\") " pod="openshift-marketplace/certified-operators-7wrmp" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.726340 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/6ae1ad4f-caa6-49b9-9d32-6905088903bf-catalog-content\") pod \"community-operators-kgvwm\" (UID: \"6ae1ad4f-caa6-49b9-9d32-6905088903bf\") " pod="openshift-marketplace/community-operators-kgvwm" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.726369 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z7khz\" (UniqueName: \"kubernetes.io/projected/6ae1ad4f-caa6-49b9-9d32-6905088903bf-kube-api-access-z7khz\") pod \"community-operators-kgvwm\" (UID: \"6ae1ad4f-caa6-49b9-9d32-6905088903bf\") " pod="openshift-marketplace/community-operators-kgvwm" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.726400 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8x959\" (UniqueName: \"kubernetes.io/projected/1a8bb3a1-6049-4a0e-8d86-3ddd148686b9-kube-api-access-8x959\") pod \"certified-operators-7wrmp\" (UID: \"1a8bb3a1-6049-4a0e-8d86-3ddd148686b9\") " pod="openshift-marketplace/certified-operators-7wrmp" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.727187 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ae1ad4f-caa6-49b9-9d32-6905088903bf-catalog-content\") pod \"community-operators-kgvwm\" (UID: \"6ae1ad4f-caa6-49b9-9d32-6905088903bf\") " pod="openshift-marketplace/community-operators-kgvwm" Dec 05 17:35:42 crc kubenswrapper[4961]: E1205 17:35:42.727223 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:43.227194589 +0000 UTC m=+149.288345062 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.727394 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.727435 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a8bb3a1-6049-4a0e-8d86-3ddd148686b9-utilities\") pod \"certified-operators-7wrmp\" (UID: \"1a8bb3a1-6049-4a0e-8d86-3ddd148686b9\") " pod="openshift-marketplace/certified-operators-7wrmp" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.727501 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ae1ad4f-caa6-49b9-9d32-6905088903bf-utilities\") pod \"community-operators-kgvwm\" (UID: \"6ae1ad4f-caa6-49b9-9d32-6905088903bf\") " pod="openshift-marketplace/community-operators-kgvwm" Dec 05 17:35:42 crc kubenswrapper[4961]: E1205 17:35:42.727671 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:43.227662481 +0000 UTC m=+149.288812954 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.727679 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a8bb3a1-6049-4a0e-8d86-3ddd148686b9-catalog-content\") pod \"certified-operators-7wrmp\" (UID: \"1a8bb3a1-6049-4a0e-8d86-3ddd148686b9\") " pod="openshift-marketplace/certified-operators-7wrmp" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.727837 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ae1ad4f-caa6-49b9-9d32-6905088903bf-utilities\") pod \"community-operators-kgvwm\" (UID: \"6ae1ad4f-caa6-49b9-9d32-6905088903bf\") " pod="openshift-marketplace/community-operators-kgvwm" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.727952 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a8bb3a1-6049-4a0e-8d86-3ddd148686b9-utilities\") pod \"certified-operators-7wrmp\" (UID: \"1a8bb3a1-6049-4a0e-8d86-3ddd148686b9\") " pod="openshift-marketplace/certified-operators-7wrmp" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.746191 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hpvmn" event={"ID":"114cc896-bf47-4c02-b478-3bb3b3aaa729","Type":"ContainerStarted","Data":"12e5c8e6a33948a1505399e4c454a0072a6dd2cf8945b865bcbab16a9748503b"} Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.762387 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-bghjx" event={"ID":"f9f9b3cf-6636-42ce-a11f-d85b976c33a5","Type":"ContainerStarted","Data":"d519700b359b3ef7ad4f10064af9fca01cbd3f5d0bf4969191fbb883841251cf"} Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.776730 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-c5wk7"] Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.780823 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c5wk7" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.790054 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z7khz\" (UniqueName: \"kubernetes.io/projected/6ae1ad4f-caa6-49b9-9d32-6905088903bf-kube-api-access-z7khz\") pod \"community-operators-kgvwm\" (UID: \"6ae1ad4f-caa6-49b9-9d32-6905088903bf\") " pod="openshift-marketplace/community-operators-kgvwm" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.813720 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8x959\" (UniqueName: \"kubernetes.io/projected/1a8bb3a1-6049-4a0e-8d86-3ddd148686b9-kube-api-access-8x959\") pod \"certified-operators-7wrmp\" (UID: \"1a8bb3a1-6049-4a0e-8d86-3ddd148686b9\") " pod="openshift-marketplace/certified-operators-7wrmp" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.821331 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c5wk7"] Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.822065 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-6px8t" event={"ID":"475e25de-63ce-4cae-8fc6-4c057d616247","Type":"ContainerStarted","Data":"1b8500d513bb2219da4019107ea24f429ad71858a7e9d73092b2ab2c24438449"} Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.846377 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.846902 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.846925 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.846972 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.847033 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6b877\" (UniqueName: \"kubernetes.io/projected/eb4981fe-4b26-4626-8c45-ba311dc825d9-kube-api-access-6b877\") pod \"certified-operators-c5wk7\" (UID: \"eb4981fe-4b26-4626-8c45-ba311dc825d9\") " pod="openshift-marketplace/certified-operators-c5wk7" Dec 05 17:35:42 crc 
kubenswrapper[4961]: I1205 17:35:42.847050 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb4981fe-4b26-4626-8c45-ba311dc825d9-utilities\") pod \"certified-operators-c5wk7\" (UID: \"eb4981fe-4b26-4626-8c45-ba311dc825d9\") " pod="openshift-marketplace/certified-operators-c5wk7" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.847065 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb4981fe-4b26-4626-8c45-ba311dc825d9-catalog-content\") pod \"certified-operators-c5wk7\" (UID: \"eb4981fe-4b26-4626-8c45-ba311dc825d9\") " pod="openshift-marketplace/certified-operators-c5wk7" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.849872 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:35:42 crc kubenswrapper[4961]: E1205 17:35:42.850034 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:43.350016905 +0000 UTC m=+149.411167378 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.855351 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.865484 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.908751 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.908853 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.936038 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7xwcd" event={"ID":"bfd6bf8b-a642-4717-a3e5-5e972e49e665","Type":"ContainerStarted","Data":"b5a837197edf67f707e42f363d316df458d8872b9b9a9cf55d27b6b55bcb195d"} Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.943897 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zswqb" event={"ID":"17b393b4-5451-4ef1-9486-2b1b6a70a1c3","Type":"ContainerStarted","Data":"9211174f451171f8f1c8276b8af662c28b7170accbb5a8f74a19d21348932daa"} Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.943931 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zswqb" event={"ID":"17b393b4-5451-4ef1-9486-2b1b6a70a1c3","Type":"ContainerStarted","Data":"98a73216c35b8f594500f3fd3fbf1e1f52dded249473c8a7f20bf87024572270"} Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.948015 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6b877\" (UniqueName: \"kubernetes.io/projected/eb4981fe-4b26-4626-8c45-ba311dc825d9-kube-api-access-6b877\") pod \"certified-operators-c5wk7\" (UID: \"eb4981fe-4b26-4626-8c45-ba311dc825d9\") " pod="openshift-marketplace/certified-operators-c5wk7" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.948969 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb4981fe-4b26-4626-8c45-ba311dc825d9-utilities\") pod \"certified-operators-c5wk7\" (UID: \"eb4981fe-4b26-4626-8c45-ba311dc825d9\") " pod="openshift-marketplace/certified-operators-c5wk7" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.948988 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb4981fe-4b26-4626-8c45-ba311dc825d9-catalog-content\") pod \"certified-operators-c5wk7\" (UID: \"eb4981fe-4b26-4626-8c45-ba311dc825d9\") " pod="openshift-marketplace/certified-operators-c5wk7" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.949354 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb4981fe-4b26-4626-8c45-ba311dc825d9-utilities\") pod \"certified-operators-c5wk7\" (UID: \"eb4981fe-4b26-4626-8c45-ba311dc825d9\") " pod="openshift-marketplace/certified-operators-c5wk7" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.949508 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb4981fe-4b26-4626-8c45-ba311dc825d9-catalog-content\") pod \"certified-operators-c5wk7\" (UID: \"eb4981fe-4b26-4626-8c45-ba311dc825d9\") " pod="openshift-marketplace/certified-operators-c5wk7" Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.949739 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 
17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.949943 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:35:42 crc kubenswrapper[4961]: E1205 17:35:42.950192 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:43.450182073 +0000 UTC m=+149.511332546 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:42 crc kubenswrapper[4961]: I1205 17:35:42.976635 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.000360 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-kgvwm" Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.005484 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6b877\" (UniqueName: \"kubernetes.io/projected/eb4981fe-4b26-4626-8c45-ba311dc825d9-kube-api-access-6b877\") pod \"certified-operators-c5wk7\" (UID: \"eb4981fe-4b26-4626-8c45-ba311dc825d9\") " pod="openshift-marketplace/certified-operators-c5wk7" Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.024039 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-5ks25" podStartSLOduration=127.024012934 podStartE2EDuration="2m7.024012934s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:43.001730176 +0000 UTC m=+149.062880659" watchObservedRunningTime="2025-12-05 17:35:43.024012934 +0000 UTC m=+149.085163407" Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.034120 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q89t5" event={"ID":"c203f16b-553f-4282-92bb-6b7af8dfc31d","Type":"ContainerStarted","Data":"5da62dfd6f655ce32909975a3118824079d161510e76bfef285ea7dd776d6524"} Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.051149 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:43 crc kubenswrapper[4961]: E1205 17:35:43.051599 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:43.551584199 +0000 UTC m=+149.612734672 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.113921 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7wrmp" Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.143143 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-c4krp" event={"ID":"3733b15a-cd45-418e-9452-79a33535ce35","Type":"ContainerStarted","Data":"3e79ab8c02f5039d8fb9f1d8d54f3135d72ab16ace4041ebc8586eba0e199646"} Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.144383 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-c4krp" Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.152186 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-c5wk7" Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.152818 4961 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-c4krp container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" start-of-body= Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.152852 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-c4krp" podUID="3733b15a-cd45-418e-9452-79a33535ce35" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.28:8080/healthz\": dial tcp 10.217.0.28:8080: connect: connection refused" Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.153844 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:43 crc kubenswrapper[4961]: E1205 17:35:43.154312 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:43.654292797 +0000 UTC m=+149.715443270 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.196110 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.201591 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qtkvq" podStartSLOduration=127.201567939 podStartE2EDuration="2m7.201567939s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:43.126141529 +0000 UTC m=+149.187292002" watchObservedRunningTime="2025-12-05 17:35:43.201567939 +0000 UTC m=+149.262718412" Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.250259 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-hpvmn" podStartSLOduration=127.250242635 podStartE2EDuration="2m7.250242635s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:43.207586163 +0000 UTC m=+149.268736636" watchObservedRunningTime="2025-12-05 17:35:43.250242635 +0000 UTC m=+149.311393108" Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.251265 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-mwbff" podStartSLOduration=127.251256179 podStartE2EDuration="2m7.251256179s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:43.249166739 +0000 UTC m=+149.310317202" watchObservedRunningTime="2025-12-05 17:35:43.251256179 +0000 UTC m=+149.312406652" Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.256502 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:43 crc kubenswrapper[4961]: E1205 17:35:43.256629 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:43.756608786 +0000 UTC m=+149.817759269 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.257020 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:43 crc kubenswrapper[4961]: E1205 17:35:43.257395 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:43.757383634 +0000 UTC m=+149.818534107 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.272250 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-znxrq" event={"ID":"02fabe40-bb13-4cec-af0f-0c08d46f511d","Type":"ContainerStarted","Data":"1ecd6c899d13a2326c47213566df84ed501c9cdd4c5cfa1cac3aee4820c3e0b5"} Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.273435 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-znxrq" Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.436616 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:43 crc kubenswrapper[4961]: E1205 17:35:43.437864 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:43.937849098 +0000 UTC m=+149.998999571 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.448268 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-znxrq" Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.450755 4961 patch_prober.go:28] interesting pod/router-default-5444994796-nksfl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:35:43 crc kubenswrapper[4961]: [-]has-synced failed: reason withheld Dec 05 17:35:43 crc kubenswrapper[4961]: [+]process-running ok Dec 05 17:35:43 crc kubenswrapper[4961]: healthz check failed Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.450806 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nksfl" podUID="84d294ab-bfe4-4ec2-ba8a-6a5540d570b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.465065 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-5x2t5" event={"ID":"901195d1-b2c7-4ddb-831c-7f5efca5c758","Type":"ContainerStarted","Data":"c6257de38210d2cd04080d8f873618c2f9eb82d03c1d50b62ad1308c798644aa"} Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.486212 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-mzkwv" event={"ID":"df860690-ba32-4a90-ab42-80ae129b935a","Type":"ContainerStarted","Data":"0b3230e0d42682a92ea87c50dae94b725ae4dc823b4f4be831ed71a15d7c3425"} Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.486872 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-mzkwv" Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.488002 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-5hmrk" event={"ID":"601ba962-a964-472d-b481-4946fd7265b1","Type":"ContainerStarted","Data":"e1a31306d8a7a5654541494ed9495550208b1bec891c4ffe63132d9dd19d7ceb"} Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.521898 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" event={"ID":"f9ea79ab-de4c-4165-82d4-84b9d73df5a4","Type":"ContainerStarted","Data":"40ac9277f765c64a6048308efb71c5191855576b3396ea888ee9dfd0fd56d1d3"} Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.542223 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:43 crc kubenswrapper[4961]: E1205 17:35:43.546856 
4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:44.046841615 +0000 UTC m=+150.107992088 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.547744 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.619652 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6fzkv" podStartSLOduration=127.619632992 podStartE2EDuration="2m7.619632992s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:43.465581066 +0000 UTC m=+149.526731539" watchObservedRunningTime="2025-12-05 17:35:43.619632992 +0000 UTC m=+149.680783465" Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.627138 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-vg677" event={"ID":"af42c194-0d59-4fbb-ab5c-777172b46fd8","Type":"ContainerStarted","Data":"f021f9b64c3dc1eae18b1611c987461c815875e298c92bac8e029b77dc8b2d86"} Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.648519 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:43 crc kubenswrapper[4961]: E1205 17:35:43.649058 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:44.14903707 +0000 UTC m=+150.210187543 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.666553 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-c4krp" podStartSLOduration=127.666528595 podStartE2EDuration="2m7.666528595s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:43.620904083 +0000 UTC m=+149.682054556" watchObservedRunningTime="2025-12-05 17:35:43.666528595 +0000 UTC m=+149.727679068" Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.682870 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-zswqb" podStartSLOduration=127.682823132 podStartE2EDuration="2m7.682823132s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:43.667735144 +0000 UTC m=+149.728885617" watchObservedRunningTime="2025-12-05 17:35:43.682823132 +0000 UTC m=+149.743973615" Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.687866 4961 generic.go:334] "Generic (PLEG): container finished" podID="892bdd9c-95f3-450b-9b7b-917e481f0d6f" containerID="2a1b0c5c35cceddf4a0b5aec5c28f75ebd302a9dfb9453e7c75c20685dbb919c" exitCode=0 Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.688000 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" event={"ID":"892bdd9c-95f3-450b-9b7b-917e481f0d6f","Type":"ContainerDied","Data":"2a1b0c5c35cceddf4a0b5aec5c28f75ebd302a9dfb9453e7c75c20685dbb919c"} Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.756061 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:43 crc kubenswrapper[4961]: E1205 17:35:43.757915 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:44.257903535 +0000 UTC m=+150.319054008 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.776427 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-xlpsm" event={"ID":"2af64acf-53d5-4288-b53f-e889631a7ee4","Type":"ContainerStarted","Data":"afb1f149d8537d9c4cb395aec7bdedd28266cc3d9ee7655b1f8de7d418438152"} Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.799519 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" event={"ID":"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6","Type":"ContainerStarted","Data":"2bec6009f0beecc0f250fe63c9f52737688cbbe1d03efe3aa0829c814f5412cd"} Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.802363 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-znxrq" podStartSLOduration=127.802350159 podStartE2EDuration="2m7.802350159s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:43.768010054 +0000 UTC m=+149.829160527" watchObservedRunningTime="2025-12-05 17:35:43.802350159 +0000 UTC m=+149.863500632" Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.833127 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-hg4b6" event={"ID":"37e0f3d5-9ee5-4aac-aaef-205d8466b84a","Type":"ContainerStarted","Data":"1d30f97873355fdb05ee5f04163f46cd792accab4bcfe1383391a26aaae34fa8"} Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.857017 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:43 crc kubenswrapper[4961]: E1205 17:35:43.865128 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:44.358911702 +0000 UTC m=+150.420062195 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.865273 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:43 crc kubenswrapper[4961]: E1205 17:35:43.866578 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:44.366557583 +0000 UTC m=+150.427708116 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.869764 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-6px8t" podStartSLOduration=127.869745328 podStartE2EDuration="2m7.869745328s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:43.868748126 +0000 UTC m=+149.929898599" watchObservedRunningTime="2025-12-05 17:35:43.869745328 +0000 UTC m=+149.930895801" Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.896191 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" event={"ID":"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2","Type":"ContainerStarted","Data":"84e99ff73a2df916b54216e63c08d121259173182c4cb57a4b9b70c74cb2bcf5"} Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.922841 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" event={"ID":"e44fec59-fa03-4dd6-be86-108902060c91","Type":"ContainerStarted","Data":"1860d6dbf056410fd34e1541a7febe76df258dfdf57fbe6a5f4abf933f26592f"} Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.925494 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.954114 4961 patch_prober.go:28] interesting pod/downloads-7954f5f757-2msfx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 05 17:35:43 crc 
kubenswrapper[4961]: I1205 17:35:43.954160 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2msfx" podUID="694a4dea-466c-48de-b828-dded0b4e3309" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.954875 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vpcxg" event={"ID":"4a0cedd3-c46f-462a-8cd9-54629e7cb576","Type":"ContainerStarted","Data":"52c340205ef7e69acc55f617f7c6e3cdde7174bf2beef4a294f34f2409cfbde9"} Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.954903 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vpcxg" event={"ID":"4a0cedd3-c46f-462a-8cd9-54629e7cb576","Type":"ContainerStarted","Data":"b8ec19b4441a44589c9fd36905ca23b08682826471febe05d793df6a23a0de4f"} Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.968340 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:43 crc kubenswrapper[4961]: E1205 17:35:43.970537 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:44.47051659 +0000 UTC m=+150.531667063 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.973400 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:35:43 crc kubenswrapper[4961]: I1205 17:35:43.979427 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-4z6bw" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.005395 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-hlv6r" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.087596 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:44 crc kubenswrapper[4961]: E1205 17:35:44.088802 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:44.588761418 +0000 UTC m=+150.649911891 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.149147 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-7xwcd" podStartSLOduration=128.14911858 podStartE2EDuration="2m8.14911858s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:44.005609764 +0000 UTC m=+150.066760247" watchObservedRunningTime="2025-12-05 17:35:44.14911858 +0000 UTC m=+150.210269053" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.169002 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" podStartSLOduration=129.168981101 podStartE2EDuration="2m9.168981101s" podCreationTimestamp="2025-12-05 17:33:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:44.095045416 +0000 UTC m=+150.156195909" watchObservedRunningTime="2025-12-05 17:35:44.168981101 +0000 UTC m=+150.230131574" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.177547 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-mzkwv" podStartSLOduration=11.177525624 podStartE2EDuration="11.177525624s" podCreationTimestamp="2025-12-05 17:35:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:44.168213164 +0000 UTC m=+150.229363647" watchObservedRunningTime="2025-12-05 17:35:44.177525624 +0000 UTC m=+150.238676097" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.191500 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:44 crc kubenswrapper[4961]: E1205 17:35:44.191871 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:44.691856775 +0000 UTC m=+150.753007248 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.219664 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2djc5"] Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.246976 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2djc5" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.300750 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39220eba-93d5-4e82-89a9-97d1383522a4-catalog-content\") pod \"redhat-marketplace-2djc5\" (UID: \"39220eba-93d5-4e82-89a9-97d1383522a4\") " pod="openshift-marketplace/redhat-marketplace-2djc5" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.300874 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39220eba-93d5-4e82-89a9-97d1383522a4-utilities\") pod \"redhat-marketplace-2djc5\" (UID: \"39220eba-93d5-4e82-89a9-97d1383522a4\") " pod="openshift-marketplace/redhat-marketplace-2djc5" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.300913 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.300985 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xg48n\" (UniqueName: \"kubernetes.io/projected/39220eba-93d5-4e82-89a9-97d1383522a4-kube-api-access-xg48n\") pod \"redhat-marketplace-2djc5\" (UID: \"39220eba-93d5-4e82-89a9-97d1383522a4\") " pod="openshift-marketplace/redhat-marketplace-2djc5" Dec 05 17:35:44 crc kubenswrapper[4961]: E1205 17:35:44.301354 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:44.801338493 +0000 UTC m=+150.862488966 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.308871 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.318363 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-cxwzt" podStartSLOduration=129.318329547 podStartE2EDuration="2m9.318329547s" podCreationTimestamp="2025-12-05 17:33:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:44.233308398 +0000 UTC m=+150.294458891" watchObservedRunningTime="2025-12-05 17:35:44.318329547 +0000 UTC m=+150.379480020" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.320648 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2djc5"] Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.397110 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-vg677" podStartSLOduration=128.397091686 podStartE2EDuration="2m8.397091686s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:44.395015807 +0000 UTC m=+150.456166280" watchObservedRunningTime="2025-12-05 17:35:44.397091686 +0000 UTC m=+150.458242159" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.402464 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.402752 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39220eba-93d5-4e82-89a9-97d1383522a4-utilities\") pod \"redhat-marketplace-2djc5\" (UID: \"39220eba-93d5-4e82-89a9-97d1383522a4\") " pod="openshift-marketplace/redhat-marketplace-2djc5" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.402916 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xg48n\" (UniqueName: \"kubernetes.io/projected/39220eba-93d5-4e82-89a9-97d1383522a4-kube-api-access-xg48n\") pod \"redhat-marketplace-2djc5\" (UID: \"39220eba-93d5-4e82-89a9-97d1383522a4\") " pod="openshift-marketplace/redhat-marketplace-2djc5" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.402947 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39220eba-93d5-4e82-89a9-97d1383522a4-catalog-content\") pod \"redhat-marketplace-2djc5\" (UID: \"39220eba-93d5-4e82-89a9-97d1383522a4\") " 
pod="openshift-marketplace/redhat-marketplace-2djc5" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.404499 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39220eba-93d5-4e82-89a9-97d1383522a4-catalog-content\") pod \"redhat-marketplace-2djc5\" (UID: \"39220eba-93d5-4e82-89a9-97d1383522a4\") " pod="openshift-marketplace/redhat-marketplace-2djc5" Dec 05 17:35:44 crc kubenswrapper[4961]: E1205 17:35:44.405071 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:44.905052505 +0000 UTC m=+150.966202978 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.405570 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39220eba-93d5-4e82-89a9-97d1383522a4-utilities\") pod \"redhat-marketplace-2djc5\" (UID: \"39220eba-93d5-4e82-89a9-97d1383522a4\") " pod="openshift-marketplace/redhat-marketplace-2djc5" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.412730 4961 patch_prober.go:28] interesting pod/router-default-5444994796-nksfl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:35:44 crc kubenswrapper[4961]: [-]has-synced failed: reason withheld Dec 05 17:35:44 crc kubenswrapper[4961]: [+]process-running ok Dec 05 17:35:44 crc kubenswrapper[4961]: healthz check failed Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.429946 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nksfl" podUID="84d294ab-bfe4-4ec2-ba8a-6a5540d570b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.433255 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-hdlff"] Dec 05 17:35:44 crc kubenswrapper[4961]: W1205 17:35:44.468255 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf3d9725_8492_4a3a_b125_a545b2d4c8c7.slice/crio-1606c93c4330eead7795a39c908cfd878e68bf8d2d50416613e52e0b56cc71ab WatchSource:0}: Error finding container 1606c93c4330eead7795a39c908cfd878e68bf8d2d50416613e52e0b56cc71ab: Status 404 returned error can't find the container with id 1606c93c4330eead7795a39c908cfd878e68bf8d2d50416613e52e0b56cc71ab Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.500526 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xg48n\" (UniqueName: \"kubernetes.io/projected/39220eba-93d5-4e82-89a9-97d1383522a4-kube-api-access-xg48n\") pod \"redhat-marketplace-2djc5\" (UID: \"39220eba-93d5-4e82-89a9-97d1383522a4\") " 
pod="openshift-marketplace/redhat-marketplace-2djc5" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.504707 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:44 crc kubenswrapper[4961]: E1205 17:35:44.505129 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:45.00511601 +0000 UTC m=+151.066266483 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.549069 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-vpcxg" podStartSLOduration=128.549042443 podStartE2EDuration="2m8.549042443s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:44.506676337 +0000 UTC m=+150.567826820" watchObservedRunningTime="2025-12-05 17:35:44.549042443 +0000 UTC m=+150.610192916" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.593831 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-j4wq7"] Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.595016 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j4wq7" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.606259 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:44 crc kubenswrapper[4961]: E1205 17:35:44.606719 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:45.106700221 +0000 UTC m=+151.167850694 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.664124 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-7wrmp"] Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.673231 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2djc5" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.707800 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbz4g\" (UniqueName: \"kubernetes.io/projected/0b4e16a7-3656-4a7f-b0a9-4a8c5210d292-kube-api-access-qbz4g\") pod \"redhat-marketplace-j4wq7\" (UID: \"0b4e16a7-3656-4a7f-b0a9-4a8c5210d292\") " pod="openshift-marketplace/redhat-marketplace-j4wq7" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.707885 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.707920 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b4e16a7-3656-4a7f-b0a9-4a8c5210d292-utilities\") pod \"redhat-marketplace-j4wq7\" (UID: \"0b4e16a7-3656-4a7f-b0a9-4a8c5210d292\") " pod="openshift-marketplace/redhat-marketplace-j4wq7" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.708017 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b4e16a7-3656-4a7f-b0a9-4a8c5210d292-catalog-content\") pod \"redhat-marketplace-j4wq7\" (UID: \"0b4e16a7-3656-4a7f-b0a9-4a8c5210d292\") " pod="openshift-marketplace/redhat-marketplace-j4wq7" Dec 05 17:35:44 crc kubenswrapper[4961]: E1205 17:35:44.708284 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:45.208265802 +0000 UTC m=+151.269416275 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.754702 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j4wq7"] Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.792035 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-xlpsm" podStartSLOduration=128.792010779 podStartE2EDuration="2m8.792010779s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:44.741237735 +0000 UTC m=+150.802388208" watchObservedRunningTime="2025-12-05 17:35:44.792010779 +0000 UTC m=+150.853161252" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.809866 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.810021 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b4e16a7-3656-4a7f-b0a9-4a8c5210d292-catalog-content\") pod \"redhat-marketplace-j4wq7\" (UID: \"0b4e16a7-3656-4a7f-b0a9-4a8c5210d292\") " pod="openshift-marketplace/redhat-marketplace-j4wq7" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.810058 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbz4g\" (UniqueName: \"kubernetes.io/projected/0b4e16a7-3656-4a7f-b0a9-4a8c5210d292-kube-api-access-qbz4g\") pod \"redhat-marketplace-j4wq7\" (UID: \"0b4e16a7-3656-4a7f-b0a9-4a8c5210d292\") " pod="openshift-marketplace/redhat-marketplace-j4wq7" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.810101 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b4e16a7-3656-4a7f-b0a9-4a8c5210d292-utilities\") pod \"redhat-marketplace-j4wq7\" (UID: \"0b4e16a7-3656-4a7f-b0a9-4a8c5210d292\") " pod="openshift-marketplace/redhat-marketplace-j4wq7" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.810507 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b4e16a7-3656-4a7f-b0a9-4a8c5210d292-utilities\") pod \"redhat-marketplace-j4wq7\" (UID: \"0b4e16a7-3656-4a7f-b0a9-4a8c5210d292\") " pod="openshift-marketplace/redhat-marketplace-j4wq7" Dec 05 17:35:44 crc kubenswrapper[4961]: E1205 17:35:44.810590 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-05 17:35:45.31057677 +0000 UTC m=+151.371727243 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.810813 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b4e16a7-3656-4a7f-b0a9-4a8c5210d292-catalog-content\") pod \"redhat-marketplace-j4wq7\" (UID: \"0b4e16a7-3656-4a7f-b0a9-4a8c5210d292\") " pod="openshift-marketplace/redhat-marketplace-j4wq7" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.839540 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-kgvwm"] Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.879945 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbz4g\" (UniqueName: \"kubernetes.io/projected/0b4e16a7-3656-4a7f-b0a9-4a8c5210d292-kube-api-access-qbz4g\") pod \"redhat-marketplace-j4wq7\" (UID: \"0b4e16a7-3656-4a7f-b0a9-4a8c5210d292\") " pod="openshift-marketplace/redhat-marketplace-j4wq7" Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.911230 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:44 crc kubenswrapper[4961]: E1205 17:35:44.911595 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:45.411581308 +0000 UTC m=+151.472731781 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:44 crc kubenswrapper[4961]: I1205 17:35:44.996624 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j4wq7" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.000291 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kgvwm" event={"ID":"6ae1ad4f-caa6-49b9-9d32-6905088903bf","Type":"ContainerStarted","Data":"601aab3516033a5634d773845d0d202de78416c4ccfbc100cc0a28bd46368409"} Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.047161 4961 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.048216 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:45 crc kubenswrapper[4961]: E1205 17:35:45.048923 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:45.548901318 +0000 UTC m=+151.610051791 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.100545 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-c5wk7"] Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.181297 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:45 crc kubenswrapper[4961]: E1205 17:35:45.181638 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:45.681626377 +0000 UTC m=+151.742776850 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.184415 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wm6wm" event={"ID":"7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9","Type":"ContainerStarted","Data":"d247416d0a913ada6474f90acaf6de6a95303984fd8ef6380025313a20797977"} Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.184457 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wm6wm" event={"ID":"7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9","Type":"ContainerStarted","Data":"5159ebe7649e4cbea06a9655ebe4a5a85376e6f425ffc0a49499eb4a1864eb64"} Dec 05 17:35:45 crc kubenswrapper[4961]: W1205 17:35:45.199551 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe485a1_e14f_4c09_b5b9_f252bc42b7e8.slice/crio-28dec32387dad4e96613ea4a980f82843eb1729a8a652c3aad079cfb694993e3 WatchSource:0}: Error finding container 28dec32387dad4e96613ea4a980f82843eb1729a8a652c3aad079cfb694993e3: Status 404 returned error can't find the container with id 28dec32387dad4e96613ea4a980f82843eb1729a8a652c3aad079cfb694993e3 Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.222313 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" event={"ID":"1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2","Type":"ContainerStarted","Data":"b2e3889406e29c3b65ba536fc60abf5800afeb36714de7fab005805e80f4e084"} Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.244241 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hdlff" event={"ID":"af3d9725-8492-4a3a-b125-a545b2d4c8c7","Type":"ContainerStarted","Data":"1606c93c4330eead7795a39c908cfd878e68bf8d2d50416613e52e0b56cc71ab"} Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.251840 4961 generic.go:334] "Generic (PLEG): container finished" podID="45c41860-47e0-4fc6-b9e2-73308ab35bfe" containerID="cf9784d6224eedf65690b56c4a042d26ed4e7e57ec423e5cf090da1749306ba3" exitCode=0 Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.251914 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-s2vbd" event={"ID":"45c41860-47e0-4fc6-b9e2-73308ab35bfe","Type":"ContainerDied","Data":"cf9784d6224eedf65690b56c4a042d26ed4e7e57ec423e5cf090da1749306ba3"} Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.271928 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2djc5"] Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.282309 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:45 crc kubenswrapper[4961]: E1205 17:35:45.282731 4961 nestedpendingoperations.go:348] 
Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:45.782713927 +0000 UTC m=+151.843864400 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.311261 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" event={"ID":"892bdd9c-95f3-450b-9b7b-917e481f0d6f","Type":"ContainerStarted","Data":"5809c38166059a6288458ec67c8643dbf8ab772307436d9fb0dbce071affbe1a"} Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.318886 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"65e2b7efde374b4816ebe4d2d56109f65e5abc20d8bf63c2aec6271d8b5ce794"} Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.347739 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7wrmp" event={"ID":"1a8bb3a1-6049-4a0e-8d86-3ddd148686b9","Type":"ContainerStarted","Data":"c22451a68046fc921894d3c0c4d1adb1ed1f0d6763703b0e032a73354f1f72d5"} Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.348743 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-2vgjf"] Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.354603 4961 patch_prober.go:28] interesting pod/downloads-7954f5f757-2msfx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.354659 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2msfx" podUID="694a4dea-466c-48de-b828-dded0b4e3309" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.356749 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2vgjf" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.361959 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.363849 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-c4krp" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.386906 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2vgjf"] Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.387819 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:45 crc kubenswrapper[4961]: E1205 17:35:45.389238 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:45.889226425 +0000 UTC m=+151.950376898 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.408628 4961 patch_prober.go:28] interesting pod/router-default-5444994796-nksfl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:35:45 crc kubenswrapper[4961]: [-]has-synced failed: reason withheld Dec 05 17:35:45 crc kubenswrapper[4961]: [+]process-running ok Dec 05 17:35:45 crc kubenswrapper[4961]: healthz check failed Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.408690 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nksfl" podUID="84d294ab-bfe4-4ec2-ba8a-6a5540d570b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.488584 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.488798 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ab60ade-3b80-4731-9fa7-09c77a0fa666-catalog-content\") pod \"redhat-operators-2vgjf\" (UID: \"4ab60ade-3b80-4731-9fa7-09c77a0fa666\") " pod="openshift-marketplace/redhat-operators-2vgjf" Dec 05 
17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.488963 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-crfnl\" (UniqueName: \"kubernetes.io/projected/4ab60ade-3b80-4731-9fa7-09c77a0fa666-kube-api-access-crfnl\") pod \"redhat-operators-2vgjf\" (UID: \"4ab60ade-3b80-4731-9fa7-09c77a0fa666\") " pod="openshift-marketplace/redhat-operators-2vgjf" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.489582 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ab60ade-3b80-4731-9fa7-09c77a0fa666-utilities\") pod \"redhat-operators-2vgjf\" (UID: \"4ab60ade-3b80-4731-9fa7-09c77a0fa666\") " pod="openshift-marketplace/redhat-operators-2vgjf" Dec 05 17:35:45 crc kubenswrapper[4961]: E1205 17:35:45.490371 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-12-05 17:35:45.990355895 +0000 UTC m=+152.051506368 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.591319 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-j4wq7"] Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.592264 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ab60ade-3b80-4731-9fa7-09c77a0fa666-utilities\") pod \"redhat-operators-2vgjf\" (UID: \"4ab60ade-3b80-4731-9fa7-09c77a0fa666\") " pod="openshift-marketplace/redhat-operators-2vgjf" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.592334 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ab60ade-3b80-4731-9fa7-09c77a0fa666-catalog-content\") pod \"redhat-operators-2vgjf\" (UID: \"4ab60ade-3b80-4731-9fa7-09c77a0fa666\") " pod="openshift-marketplace/redhat-operators-2vgjf" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.592394 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-crfnl\" (UniqueName: \"kubernetes.io/projected/4ab60ade-3b80-4731-9fa7-09c77a0fa666-kube-api-access-crfnl\") pod \"redhat-operators-2vgjf\" (UID: \"4ab60ade-3b80-4731-9fa7-09c77a0fa666\") " pod="openshift-marketplace/redhat-operators-2vgjf" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.592431 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.592965 4961 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ab60ade-3b80-4731-9fa7-09c77a0fa666-utilities\") pod \"redhat-operators-2vgjf\" (UID: \"4ab60ade-3b80-4731-9fa7-09c77a0fa666\") " pod="openshift-marketplace/redhat-operators-2vgjf" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.593155 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ab60ade-3b80-4731-9fa7-09c77a0fa666-catalog-content\") pod \"redhat-operators-2vgjf\" (UID: \"4ab60ade-3b80-4731-9fa7-09c77a0fa666\") " pod="openshift-marketplace/redhat-operators-2vgjf" Dec 05 17:35:45 crc kubenswrapper[4961]: E1205 17:35:45.593333 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-12-05 17:35:46.093318459 +0000 UTC m=+152.154469102 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-ksm5f" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.612087 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" podStartSLOduration=129.612066044 podStartE2EDuration="2m9.612066044s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:45.611113642 +0000 UTC m=+151.672264125" watchObservedRunningTime="2025-12-05 17:35:45.612066044 +0000 UTC m=+151.673216537" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.638616 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-crfnl\" (UniqueName: \"kubernetes.io/projected/4ab60ade-3b80-4731-9fa7-09c77a0fa666-kube-api-access-crfnl\") pod \"redhat-operators-2vgjf\" (UID: \"4ab60ade-3b80-4731-9fa7-09c77a0fa666\") " pod="openshift-marketplace/redhat-operators-2vgjf" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.668591 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-qtkvq" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.690329 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2vgjf" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.705317 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:45 crc kubenswrapper[4961]: E1205 17:35:45.705840 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-12-05 17:35:46.20582016 +0000 UTC m=+152.266970633 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.727255 4961 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-12-05T17:35:45.047187106Z","Handler":null,"Name":""} Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.733287 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-4rgn2"] Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.734664 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4rgn2" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.742938 4961 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.742975 4961 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.810623 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81f0c1f1-bb8e-4ae4-a87b-460ec2f12258-catalog-content\") pod \"redhat-operators-4rgn2\" (UID: \"81f0c1f1-bb8e-4ae4-a87b-460ec2f12258\") " pod="openshift-marketplace/redhat-operators-4rgn2" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.811062 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jjb9\" (UniqueName: \"kubernetes.io/projected/81f0c1f1-bb8e-4ae4-a87b-460ec2f12258-kube-api-access-7jjb9\") pod \"redhat-operators-4rgn2\" (UID: \"81f0c1f1-bb8e-4ae4-a87b-460ec2f12258\") " pod="openshift-marketplace/redhat-operators-4rgn2" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.811131 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.811202 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81f0c1f1-bb8e-4ae4-a87b-460ec2f12258-utilities\") pod \"redhat-operators-4rgn2\" (UID: \"81f0c1f1-bb8e-4ae4-a87b-460ec2f12258\") " pod="openshift-marketplace/redhat-operators-4rgn2" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.844383 4961 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not 
set. Skipping MountDevice... Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.844437 4961 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.847728 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4rgn2"] Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.912332 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81f0c1f1-bb8e-4ae4-a87b-460ec2f12258-utilities\") pod \"redhat-operators-4rgn2\" (UID: \"81f0c1f1-bb8e-4ae4-a87b-460ec2f12258\") " pod="openshift-marketplace/redhat-operators-4rgn2" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.912394 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81f0c1f1-bb8e-4ae4-a87b-460ec2f12258-catalog-content\") pod \"redhat-operators-4rgn2\" (UID: \"81f0c1f1-bb8e-4ae4-a87b-460ec2f12258\") " pod="openshift-marketplace/redhat-operators-4rgn2" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.912410 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jjb9\" (UniqueName: \"kubernetes.io/projected/81f0c1f1-bb8e-4ae4-a87b-460ec2f12258-kube-api-access-7jjb9\") pod \"redhat-operators-4rgn2\" (UID: \"81f0c1f1-bb8e-4ae4-a87b-460ec2f12258\") " pod="openshift-marketplace/redhat-operators-4rgn2" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.922389 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81f0c1f1-bb8e-4ae4-a87b-460ec2f12258-catalog-content\") pod \"redhat-operators-4rgn2\" (UID: \"81f0c1f1-bb8e-4ae4-a87b-460ec2f12258\") " pod="openshift-marketplace/redhat-operators-4rgn2" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.924644 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81f0c1f1-bb8e-4ae4-a87b-460ec2f12258-utilities\") pod \"redhat-operators-4rgn2\" (UID: \"81f0c1f1-bb8e-4ae4-a87b-460ec2f12258\") " pod="openshift-marketplace/redhat-operators-4rgn2" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.974284 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jjb9\" (UniqueName: \"kubernetes.io/projected/81f0c1f1-bb8e-4ae4-a87b-460ec2f12258-kube-api-access-7jjb9\") pod \"redhat-operators-4rgn2\" (UID: \"81f0c1f1-bb8e-4ae4-a87b-460ec2f12258\") " pod="openshift-marketplace/redhat-operators-4rgn2" Dec 05 17:35:45 crc kubenswrapper[4961]: I1205 17:35:45.978708 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-ksm5f\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.005811 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-sp65b" Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.005884 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-sp65b" Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.008139 4961 patch_prober.go:28] interesting pod/console-f9d7485db-sp65b container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body= Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.008201 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-sp65b" podUID="517806cd-8b4c-4e6c-9edf-7dbcd125e0ee" containerName="console" probeResult="failure" output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: connect: connection refused" Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.013618 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.017391 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-4rgn2" Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.090658 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.145570 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.146382 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.153030 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.158707 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.218899 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f99802e1-193a-41c4-9e99-6012f6063111-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"f99802e1-193a-41c4-9e99-6012f6063111\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.219241 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f99802e1-193a-41c4-9e99-6012f6063111-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"f99802e1-193a-41c4-9e99-6012f6063111\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.227762 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.251104 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-2vgjf"] Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.265888 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.322724 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f99802e1-193a-41c4-9e99-6012f6063111-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"f99802e1-193a-41c4-9e99-6012f6063111\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.322869 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f99802e1-193a-41c4-9e99-6012f6063111-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"f99802e1-193a-41c4-9e99-6012f6063111\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.323043 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f99802e1-193a-41c4-9e99-6012f6063111-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"f99802e1-193a-41c4-9e99-6012f6063111\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.370638 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f99802e1-193a-41c4-9e99-6012f6063111-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"f99802e1-193a-41c4-9e99-6012f6063111\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.392492 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"e2f3d14dafabaaf169ab3c30c7b59675834ab29fef6dded2f653d784ed628c46"} Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.392544 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"28dec32387dad4e96613ea4a980f82843eb1729a8a652c3aad079cfb694993e3"} Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.395412 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"48e804362ac743f1debaf76c689c5da6f4072ee71b4427ef5555e5c4c5ee5f03"} Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.397860 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.422735 4961 generic.go:334] "Generic (PLEG): container finished" podID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" containerID="460fed82e92c0840ebc91e497b9f94acc5adcc1b2980d140c8075c6bdb250f52" exitCode=0 Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.423332 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kgvwm" event={"ID":"6ae1ad4f-caa6-49b9-9d32-6905088903bf","Type":"ContainerDied","Data":"460fed82e92c0840ebc91e497b9f94acc5adcc1b2980d140c8075c6bdb250f52"} Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.424022 4961 patch_prober.go:28] interesting pod/router-default-5444994796-nksfl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:35:46 crc kubenswrapper[4961]: [-]has-synced failed: reason withheld Dec 05 17:35:46 crc kubenswrapper[4961]: [+]process-running ok Dec 05 17:35:46 crc kubenswrapper[4961]: healthz check failed Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.424086 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nksfl" podUID="84d294ab-bfe4-4ec2-ba8a-6a5540d570b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.453369 4961 generic.go:334] "Generic (PLEG): container finished" podID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" containerID="52f4767a07d47bd31ad777de4907e268a8be689dd6e8b4302b6986c97dafa48a" exitCode=0 Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.453725 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hdlff" event={"ID":"af3d9725-8492-4a3a-b125-a545b2d4c8c7","Type":"ContainerDied","Data":"52f4767a07d47bd31ad777de4907e268a8be689dd6e8b4302b6986c97dafa48a"} Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.477922 4961 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.508225 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.552020 4961 generic.go:334] "Generic (PLEG): container finished" podID="0b4e16a7-3656-4a7f-b0a9-4a8c5210d292" containerID="bb6c570c3a003db52c6bbe61fd99fae6f7705cd2cb58b385a5e2c19599728a56" exitCode=0 Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.552336 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j4wq7" event={"ID":"0b4e16a7-3656-4a7f-b0a9-4a8c5210d292","Type":"ContainerDied","Data":"bb6c570c3a003db52c6bbe61fd99fae6f7705cd2cb58b385a5e2c19599728a56"} Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.552439 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j4wq7" event={"ID":"0b4e16a7-3656-4a7f-b0a9-4a8c5210d292","Type":"ContainerStarted","Data":"539d8c02ed3ee3c4e95df113e0022abeb7fe3fe9c8507b031887a47955409a80"} Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.569349 4961 generic.go:334] "Generic (PLEG): container finished" podID="eb4981fe-4b26-4626-8c45-ba311dc825d9" containerID="26109713ebb34eea5e0f7550c9731a7146072fb36351df0eb779f9c7d722f8d0" exitCode=0 Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.569445 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c5wk7" event={"ID":"eb4981fe-4b26-4626-8c45-ba311dc825d9","Type":"ContainerDied","Data":"26109713ebb34eea5e0f7550c9731a7146072fb36351df0eb779f9c7d722f8d0"} Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.569487 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c5wk7" event={"ID":"eb4981fe-4b26-4626-8c45-ba311dc825d9","Type":"ContainerStarted","Data":"2ce3953ecab7964b89db55b50b1985142774ae0ab54d34ab7f44c143025b113e"} Dec 05 17:35:46 crc kubenswrapper[4961]: W1205 17:35:46.617159 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4ab60ade_3b80_4731_9fa7_09c77a0fa666.slice/crio-36c7a194db65c9fac5439ad29418d0095870e4a5df373861cd28a9872b0d4d7b WatchSource:0}: Error finding container 36c7a194db65c9fac5439ad29418d0095870e4a5df373861cd28a9872b0d4d7b: Status 404 returned error can't find the container with id 36c7a194db65c9fac5439ad29418d0095870e4a5df373861cd28a9872b0d4d7b Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.617344 4961 generic.go:334] "Generic (PLEG): container finished" podID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" containerID="84ea574183ec67be2dae60a8be478ef34819c26b18cfb8f50943eba6223caf95" exitCode=0 Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.617431 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7wrmp" event={"ID":"1a8bb3a1-6049-4a0e-8d86-3ddd148686b9","Type":"ContainerDied","Data":"84ea574183ec67be2dae60a8be478ef34819c26b18cfb8f50943eba6223caf95"} Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.643570 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-4rgn2"] Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.664821 4961 generic.go:334] "Generic (PLEG): container finished" podID="39220eba-93d5-4e82-89a9-97d1383522a4" containerID="7fc09f87a3cb995dec370db7788d867747044a618f12d653aaf8777d114942bf" exitCode=0 Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.664882 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-2djc5" event={"ID":"39220eba-93d5-4e82-89a9-97d1383522a4","Type":"ContainerDied","Data":"7fc09f87a3cb995dec370db7788d867747044a618f12d653aaf8777d114942bf"} Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.664909 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2djc5" event={"ID":"39220eba-93d5-4e82-89a9-97d1383522a4","Type":"ContainerStarted","Data":"21f64927db412b8795edeaaddcdc37ed23528e4c959b6dd5e04631b321349cc0"} Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.668858 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"4717ce6b3bd74ddd08e3c68139cf702db1b03a5345a5eedd4765321b4b92294d"} Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.668904 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"1db27059ff29d85050576205cebb259b03e0f2e1e40b2736abf6abf8d9747455"} Dec 05 17:35:46 crc kubenswrapper[4961]: E1205 17:35:46.692662 4961 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod39220eba_93d5_4e82_89a9_97d1383522a4.slice/crio-7fc09f87a3cb995dec370db7788d867747044a618f12d653aaf8777d114942bf.scope\": RecentStats: unable to find data in memory cache]" Dec 05 17:35:46 crc kubenswrapper[4961]: W1205 17:35:46.707135 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod81f0c1f1_bb8e_4ae4_a87b_460ec2f12258.slice/crio-c1383dc0ded80127b334d1ddf2a0d30f4dc5bb34dce50cad013322abccad284a WatchSource:0}: Error finding container c1383dc0ded80127b334d1ddf2a0d30f4dc5bb34dce50cad013322abccad284a: Status 404 returned error can't find the container with id c1383dc0ded80127b334d1ddf2a0d30f4dc5bb34dce50cad013322abccad284a Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.726421 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-wm6wm" event={"ID":"7ed9f05b-c348-41bd-bce4-cc4ccc7c0bd9","Type":"ContainerStarted","Data":"2982fca5052f45e0db14bf920702ba9290e1849fcf8d3078ede012aea1334ae1"} Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.963402 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Dec 05 17:35:46 crc kubenswrapper[4961]: I1205 17:35:46.983064 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-wm6wm" podStartSLOduration=13.983045685 podStartE2EDuration="13.983045685s" podCreationTimestamp="2025-12-05 17:35:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:46.980656278 +0000 UTC m=+153.041806751" watchObservedRunningTime="2025-12-05 17:35:46.983045685 +0000 UTC m=+153.044196158" Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.173028 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ksm5f"] Dec 05 17:35:47 crc kubenswrapper[4961]: W1205 
17:35:47.209152 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod31cbe53c_5dbf_4d21_8daa_da44aa13b7dc.slice/crio-cb4ae14b4140cbf7ab68e0484bb011d1812010fd79391222cc6f9e6a51960220 WatchSource:0}: Error finding container cb4ae14b4140cbf7ab68e0484bb011d1812010fd79391222cc6f9e6a51960220: Status 404 returned error can't find the container with id cb4ae14b4140cbf7ab68e0484bb011d1812010fd79391222cc6f9e6a51960220 Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.210305 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-s2vbd" Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.253441 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.276509 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7vr9\" (UniqueName: \"kubernetes.io/projected/45c41860-47e0-4fc6-b9e2-73308ab35bfe-kube-api-access-j7vr9\") pod \"45c41860-47e0-4fc6-b9e2-73308ab35bfe\" (UID: \"45c41860-47e0-4fc6-b9e2-73308ab35bfe\") " Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.276580 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/45c41860-47e0-4fc6-b9e2-73308ab35bfe-secret-volume\") pod \"45c41860-47e0-4fc6-b9e2-73308ab35bfe\" (UID: \"45c41860-47e0-4fc6-b9e2-73308ab35bfe\") " Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.276652 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/45c41860-47e0-4fc6-b9e2-73308ab35bfe-config-volume\") pod \"45c41860-47e0-4fc6-b9e2-73308ab35bfe\" (UID: \"45c41860-47e0-4fc6-b9e2-73308ab35bfe\") " Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.277962 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/45c41860-47e0-4fc6-b9e2-73308ab35bfe-config-volume" (OuterVolumeSpecName: "config-volume") pod "45c41860-47e0-4fc6-b9e2-73308ab35bfe" (UID: "45c41860-47e0-4fc6-b9e2-73308ab35bfe"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.283937 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/45c41860-47e0-4fc6-b9e2-73308ab35bfe-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "45c41860-47e0-4fc6-b9e2-73308ab35bfe" (UID: "45c41860-47e0-4fc6-b9e2-73308ab35bfe"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.287080 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/45c41860-47e0-4fc6-b9e2-73308ab35bfe-kube-api-access-j7vr9" (OuterVolumeSpecName: "kube-api-access-j7vr9") pod "45c41860-47e0-4fc6-b9e2-73308ab35bfe" (UID: "45c41860-47e0-4fc6-b9e2-73308ab35bfe"). InnerVolumeSpecName "kube-api-access-j7vr9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.373889 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.374291 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.378921 4961 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/45c41860-47e0-4fc6-b9e2-73308ab35bfe-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.378964 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7vr9\" (UniqueName: \"kubernetes.io/projected/45c41860-47e0-4fc6-b9e2-73308ab35bfe-kube-api-access-j7vr9\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.378978 4961 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/45c41860-47e0-4fc6-b9e2-73308ab35bfe-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.380341 4961 patch_prober.go:28] interesting pod/apiserver-76f77b778f-8vtdg container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Dec 05 17:35:47 crc kubenswrapper[4961]: [+]log ok Dec 05 17:35:47 crc kubenswrapper[4961]: [+]etcd ok Dec 05 17:35:47 crc kubenswrapper[4961]: [+]poststarthook/start-apiserver-admission-initializer ok Dec 05 17:35:47 crc kubenswrapper[4961]: [+]poststarthook/generic-apiserver-start-informers ok Dec 05 17:35:47 crc kubenswrapper[4961]: [+]poststarthook/max-in-flight-filter ok Dec 05 17:35:47 crc kubenswrapper[4961]: [+]poststarthook/storage-object-count-tracker-hook ok Dec 05 17:35:47 crc kubenswrapper[4961]: [+]poststarthook/image.openshift.io-apiserver-caches ok Dec 05 17:35:47 crc kubenswrapper[4961]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Dec 05 17:35:47 crc kubenswrapper[4961]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Dec 05 17:35:47 crc kubenswrapper[4961]: [+]poststarthook/project.openshift.io-projectcache ok Dec 05 17:35:47 crc kubenswrapper[4961]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Dec 05 17:35:47 crc kubenswrapper[4961]: [+]poststarthook/openshift.io-startinformers ok Dec 05 17:35:47 crc kubenswrapper[4961]: [+]poststarthook/openshift.io-restmapperupdater ok Dec 05 17:35:47 crc kubenswrapper[4961]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Dec 05 17:35:47 crc kubenswrapper[4961]: livez check failed Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.380401 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" podUID="1906a0cc-be0d-48aa-9d8a-b48c9d60d5b2" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.402010 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-nksfl" Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.404258 4961 patch_prober.go:28] interesting 
pod/router-default-5444994796-nksfl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:35:47 crc kubenswrapper[4961]: [-]has-synced failed: reason withheld Dec 05 17:35:47 crc kubenswrapper[4961]: [+]process-running ok Dec 05 17:35:47 crc kubenswrapper[4961]: healthz check failed Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.404299 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nksfl" podUID="84d294ab-bfe4-4ec2-ba8a-6a5540d570b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.564321 4961 patch_prober.go:28] interesting pod/downloads-7954f5f757-2msfx container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.564386 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-2msfx" podUID="694a4dea-466c-48de-b828-dded0b4e3309" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.564855 4961 patch_prober.go:28] interesting pod/downloads-7954f5f757-2msfx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.564875 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2msfx" podUID="694a4dea-466c-48de-b828-dded0b4e3309" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.760615 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.760659 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.792344 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.843136 4961 generic.go:334] "Generic (PLEG): container finished" podID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" containerID="d84234a5ed0342027a73742d4682be80852ba2da8d6483ed736c970dd31ae2e3" exitCode=0 Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.843220 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2vgjf" event={"ID":"4ab60ade-3b80-4731-9fa7-09c77a0fa666","Type":"ContainerDied","Data":"d84234a5ed0342027a73742d4682be80852ba2da8d6483ed736c970dd31ae2e3"} Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.843252 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2vgjf" 
event={"ID":"4ab60ade-3b80-4731-9fa7-09c77a0fa666","Type":"ContainerStarted","Data":"36c7a194db65c9fac5439ad29418d0095870e4a5df373861cd28a9872b0d4d7b"} Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.856849 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-s2vbd" Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.856899 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415930-s2vbd" event={"ID":"45c41860-47e0-4fc6-b9e2-73308ab35bfe","Type":"ContainerDied","Data":"be1484d44fb83eb6598038e5ffe1529c4bc75d386138bc34d3b5d2f8142e5688"} Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.856989 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="be1484d44fb83eb6598038e5ffe1529c4bc75d386138bc34d3b5d2f8142e5688" Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.863173 4961 generic.go:334] "Generic (PLEG): container finished" podID="81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" containerID="94c603e837f73e29d968f111faeb443c1c0d5f5f54b467257d8df0e6bdb92e8f" exitCode=0 Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.863297 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4rgn2" event={"ID":"81f0c1f1-bb8e-4ae4-a87b-460ec2f12258","Type":"ContainerDied","Data":"94c603e837f73e29d968f111faeb443c1c0d5f5f54b467257d8df0e6bdb92e8f"} Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.863325 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4rgn2" event={"ID":"81f0c1f1-bb8e-4ae4-a87b-460ec2f12258","Type":"ContainerStarted","Data":"c1383dc0ded80127b334d1ddf2a0d30f4dc5bb34dce50cad013322abccad284a"} Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.872417 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" event={"ID":"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc","Type":"ContainerStarted","Data":"7bf1b4245e856ca773cb452d087a82625e7fadcf9dfc84a5c0314d742fc3feea"} Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.872515 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" event={"ID":"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc","Type":"ContainerStarted","Data":"cb4ae14b4140cbf7ab68e0484bb011d1812010fd79391222cc6f9e6a51960220"} Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.876051 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.896358 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"f99802e1-193a-41c4-9e99-6012f6063111","Type":"ContainerStarted","Data":"004b8b3afaab00811423a239e45567c8e481709f20dc63f8d115ec5a04091536"} Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.916424 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-ld8sp" Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.962704 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" podStartSLOduration=131.962680528 podStartE2EDuration="2m11.962680528s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" 
firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:47.956729336 +0000 UTC m=+154.017879939" watchObservedRunningTime="2025-12-05 17:35:47.962680528 +0000 UTC m=+154.023831001" Dec 05 17:35:47 crc kubenswrapper[4961]: I1205 17:35:47.983020 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=1.98299443 podStartE2EDuration="1.98299443s" podCreationTimestamp="2025-12-05 17:35:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:47.98129644 +0000 UTC m=+154.042446923" watchObservedRunningTime="2025-12-05 17:35:47.98299443 +0000 UTC m=+154.044144903" Dec 05 17:35:48 crc kubenswrapper[4961]: I1205 17:35:48.408736 4961 patch_prober.go:28] interesting pod/router-default-5444994796-nksfl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:35:48 crc kubenswrapper[4961]: [-]has-synced failed: reason withheld Dec 05 17:35:48 crc kubenswrapper[4961]: [+]process-running ok Dec 05 17:35:48 crc kubenswrapper[4961]: healthz check failed Dec 05 17:35:48 crc kubenswrapper[4961]: I1205 17:35:48.409023 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nksfl" podUID="84d294ab-bfe4-4ec2-ba8a-6a5540d570b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:35:48 crc kubenswrapper[4961]: I1205 17:35:48.907948 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"f99802e1-193a-41c4-9e99-6012f6063111","Type":"ContainerStarted","Data":"a8b7573c7197d2c9174213c5c738425ed39697e08820ca055578421bd7621b1d"} Dec 05 17:35:49 crc kubenswrapper[4961]: I1205 17:35:49.402976 4961 patch_prober.go:28] interesting pod/router-default-5444994796-nksfl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:35:49 crc kubenswrapper[4961]: [-]has-synced failed: reason withheld Dec 05 17:35:49 crc kubenswrapper[4961]: [+]process-running ok Dec 05 17:35:49 crc kubenswrapper[4961]: healthz check failed Dec 05 17:35:49 crc kubenswrapper[4961]: I1205 17:35:49.403061 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nksfl" podUID="84d294ab-bfe4-4ec2-ba8a-6a5540d570b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:35:49 crc kubenswrapper[4961]: I1205 17:35:49.545741 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-mzkwv" Dec 05 17:35:49 crc kubenswrapper[4961]: I1205 17:35:49.922264 4961 generic.go:334] "Generic (PLEG): container finished" podID="f99802e1-193a-41c4-9e99-6012f6063111" containerID="a8b7573c7197d2c9174213c5c738425ed39697e08820ca055578421bd7621b1d" exitCode=0 Dec 05 17:35:49 crc kubenswrapper[4961]: I1205 17:35:49.923249 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" 
event={"ID":"f99802e1-193a-41c4-9e99-6012f6063111","Type":"ContainerDied","Data":"a8b7573c7197d2c9174213c5c738425ed39697e08820ca055578421bd7621b1d"} Dec 05 17:35:50 crc kubenswrapper[4961]: I1205 17:35:50.105697 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 17:35:50 crc kubenswrapper[4961]: E1205 17:35:50.106359 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="45c41860-47e0-4fc6-b9e2-73308ab35bfe" containerName="collect-profiles" Dec 05 17:35:50 crc kubenswrapper[4961]: I1205 17:35:50.106377 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="45c41860-47e0-4fc6-b9e2-73308ab35bfe" containerName="collect-profiles" Dec 05 17:35:50 crc kubenswrapper[4961]: I1205 17:35:50.106510 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="45c41860-47e0-4fc6-b9e2-73308ab35bfe" containerName="collect-profiles" Dec 05 17:35:50 crc kubenswrapper[4961]: I1205 17:35:50.107021 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 17:35:50 crc kubenswrapper[4961]: I1205 17:35:50.111170 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 05 17:35:50 crc kubenswrapper[4961]: I1205 17:35:50.111977 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 05 17:35:50 crc kubenswrapper[4961]: I1205 17:35:50.118638 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 17:35:50 crc kubenswrapper[4961]: I1205 17:35:50.249429 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fee593cf-ca69-4de6-b453-a727ffe508b7-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"fee593cf-ca69-4de6-b453-a727ffe508b7\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 17:35:50 crc kubenswrapper[4961]: I1205 17:35:50.249585 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fee593cf-ca69-4de6-b453-a727ffe508b7-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"fee593cf-ca69-4de6-b453-a727ffe508b7\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 17:35:50 crc kubenswrapper[4961]: I1205 17:35:50.351058 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fee593cf-ca69-4de6-b453-a727ffe508b7-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"fee593cf-ca69-4de6-b453-a727ffe508b7\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 17:35:50 crc kubenswrapper[4961]: I1205 17:35:50.351146 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fee593cf-ca69-4de6-b453-a727ffe508b7-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"fee593cf-ca69-4de6-b453-a727ffe508b7\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 17:35:50 crc kubenswrapper[4961]: I1205 17:35:50.351274 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fee593cf-ca69-4de6-b453-a727ffe508b7-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: 
\"fee593cf-ca69-4de6-b453-a727ffe508b7\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 17:35:50 crc kubenswrapper[4961]: I1205 17:35:50.371499 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fee593cf-ca69-4de6-b453-a727ffe508b7-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"fee593cf-ca69-4de6-b453-a727ffe508b7\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 17:35:50 crc kubenswrapper[4961]: I1205 17:35:50.403119 4961 patch_prober.go:28] interesting pod/router-default-5444994796-nksfl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:35:50 crc kubenswrapper[4961]: [-]has-synced failed: reason withheld Dec 05 17:35:50 crc kubenswrapper[4961]: [+]process-running ok Dec 05 17:35:50 crc kubenswrapper[4961]: healthz check failed Dec 05 17:35:50 crc kubenswrapper[4961]: I1205 17:35:50.403186 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nksfl" podUID="84d294ab-bfe4-4ec2-ba8a-6a5540d570b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:35:50 crc kubenswrapper[4961]: I1205 17:35:50.452651 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 17:35:51 crc kubenswrapper[4961]: I1205 17:35:51.085065 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Dec 05 17:35:51 crc kubenswrapper[4961]: I1205 17:35:51.404417 4961 patch_prober.go:28] interesting pod/router-default-5444994796-nksfl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:35:51 crc kubenswrapper[4961]: [-]has-synced failed: reason withheld Dec 05 17:35:51 crc kubenswrapper[4961]: [+]process-running ok Dec 05 17:35:51 crc kubenswrapper[4961]: healthz check failed Dec 05 17:35:51 crc kubenswrapper[4961]: I1205 17:35:51.404992 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nksfl" podUID="84d294ab-bfe4-4ec2-ba8a-6a5540d570b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:35:51 crc kubenswrapper[4961]: W1205 17:35:51.520114 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podfee593cf_ca69_4de6_b453_a727ffe508b7.slice/crio-a34600f29d48b647b974915b107f39bf08acbcebcc037066c30e2456ee2788dc WatchSource:0}: Error finding container a34600f29d48b647b974915b107f39bf08acbcebcc037066c30e2456ee2788dc: Status 404 returned error can't find the container with id a34600f29d48b647b974915b107f39bf08acbcebcc037066c30e2456ee2788dc Dec 05 17:35:51 crc kubenswrapper[4961]: I1205 17:35:51.602523 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 17:35:51 crc kubenswrapper[4961]: I1205 17:35:51.703593 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f99802e1-193a-41c4-9e99-6012f6063111-kube-api-access\") pod \"f99802e1-193a-41c4-9e99-6012f6063111\" (UID: \"f99802e1-193a-41c4-9e99-6012f6063111\") " Dec 05 17:35:51 crc kubenswrapper[4961]: I1205 17:35:51.703722 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f99802e1-193a-41c4-9e99-6012f6063111-kubelet-dir\") pod \"f99802e1-193a-41c4-9e99-6012f6063111\" (UID: \"f99802e1-193a-41c4-9e99-6012f6063111\") " Dec 05 17:35:51 crc kubenswrapper[4961]: I1205 17:35:51.704092 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f99802e1-193a-41c4-9e99-6012f6063111-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "f99802e1-193a-41c4-9e99-6012f6063111" (UID: "f99802e1-193a-41c4-9e99-6012f6063111"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:35:51 crc kubenswrapper[4961]: I1205 17:35:51.730174 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f99802e1-193a-41c4-9e99-6012f6063111-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "f99802e1-193a-41c4-9e99-6012f6063111" (UID: "f99802e1-193a-41c4-9e99-6012f6063111"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:35:51 crc kubenswrapper[4961]: I1205 17:35:51.805875 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/f99802e1-193a-41c4-9e99-6012f6063111-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:51 crc kubenswrapper[4961]: I1205 17:35:51.805917 4961 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/f99802e1-193a-41c4-9e99-6012f6063111-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 17:35:51 crc kubenswrapper[4961]: I1205 17:35:51.953976 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"f99802e1-193a-41c4-9e99-6012f6063111","Type":"ContainerDied","Data":"004b8b3afaab00811423a239e45567c8e481709f20dc63f8d115ec5a04091536"} Dec 05 17:35:51 crc kubenswrapper[4961]: I1205 17:35:51.954017 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="004b8b3afaab00811423a239e45567c8e481709f20dc63f8d115ec5a04091536" Dec 05 17:35:51 crc kubenswrapper[4961]: I1205 17:35:51.954076 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Dec 05 17:35:51 crc kubenswrapper[4961]: I1205 17:35:51.964350 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"fee593cf-ca69-4de6-b453-a727ffe508b7","Type":"ContainerStarted","Data":"a34600f29d48b647b974915b107f39bf08acbcebcc037066c30e2456ee2788dc"} Dec 05 17:35:52 crc kubenswrapper[4961]: I1205 17:35:52.380002 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" Dec 05 17:35:52 crc kubenswrapper[4961]: I1205 17:35:52.386277 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-8vtdg" Dec 05 17:35:52 crc kubenswrapper[4961]: I1205 17:35:52.406332 4961 patch_prober.go:28] interesting pod/router-default-5444994796-nksfl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:35:52 crc kubenswrapper[4961]: [-]has-synced failed: reason withheld Dec 05 17:35:52 crc kubenswrapper[4961]: [+]process-running ok Dec 05 17:35:52 crc kubenswrapper[4961]: healthz check failed Dec 05 17:35:52 crc kubenswrapper[4961]: I1205 17:35:52.406410 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nksfl" podUID="84d294ab-bfe4-4ec2-ba8a-6a5540d570b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:35:52 crc kubenswrapper[4961]: I1205 17:35:52.993329 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"fee593cf-ca69-4de6-b453-a727ffe508b7","Type":"ContainerStarted","Data":"4cc6af97484bca98283eb9a6c36c0c41588cc940b5eba4b6f4cd0eb8e2e6133d"} Dec 05 17:35:53 crc kubenswrapper[4961]: I1205 17:35:53.422310 4961 patch_prober.go:28] interesting pod/router-default-5444994796-nksfl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:35:53 crc kubenswrapper[4961]: [-]has-synced failed: reason withheld Dec 05 17:35:53 crc kubenswrapper[4961]: [+]process-running ok Dec 05 17:35:53 crc kubenswrapper[4961]: healthz check failed Dec 05 17:35:53 crc kubenswrapper[4961]: I1205 17:35:53.422404 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nksfl" podUID="84d294ab-bfe4-4ec2-ba8a-6a5540d570b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:35:54 crc kubenswrapper[4961]: I1205 17:35:54.062571 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=4.062548562 podStartE2EDuration="4.062548562s" podCreationTimestamp="2025-12-05 17:35:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:35:54.057425501 +0000 UTC m=+160.118575984" watchObservedRunningTime="2025-12-05 17:35:54.062548562 +0000 UTC m=+160.123699035" Dec 05 17:35:54 crc kubenswrapper[4961]: I1205 17:35:54.403439 4961 patch_prober.go:28] interesting pod/router-default-5444994796-nksfl container/router namespace/openshift-ingress: Startup probe 
status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:35:54 crc kubenswrapper[4961]: [-]has-synced failed: reason withheld Dec 05 17:35:54 crc kubenswrapper[4961]: [+]process-running ok Dec 05 17:35:54 crc kubenswrapper[4961]: healthz check failed Dec 05 17:35:54 crc kubenswrapper[4961]: I1205 17:35:54.403505 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nksfl" podUID="84d294ab-bfe4-4ec2-ba8a-6a5540d570b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:35:55 crc kubenswrapper[4961]: I1205 17:35:55.044883 4961 generic.go:334] "Generic (PLEG): container finished" podID="fee593cf-ca69-4de6-b453-a727ffe508b7" containerID="4cc6af97484bca98283eb9a6c36c0c41588cc940b5eba4b6f4cd0eb8e2e6133d" exitCode=0 Dec 05 17:35:55 crc kubenswrapper[4961]: I1205 17:35:55.044945 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"fee593cf-ca69-4de6-b453-a727ffe508b7","Type":"ContainerDied","Data":"4cc6af97484bca98283eb9a6c36c0c41588cc940b5eba4b6f4cd0eb8e2e6133d"} Dec 05 17:35:55 crc kubenswrapper[4961]: I1205 17:35:55.404025 4961 patch_prober.go:28] interesting pod/router-default-5444994796-nksfl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:35:55 crc kubenswrapper[4961]: [-]has-synced failed: reason withheld Dec 05 17:35:55 crc kubenswrapper[4961]: [+]process-running ok Dec 05 17:35:55 crc kubenswrapper[4961]: healthz check failed Dec 05 17:35:55 crc kubenswrapper[4961]: I1205 17:35:55.404082 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nksfl" podUID="84d294ab-bfe4-4ec2-ba8a-6a5540d570b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Dec 05 17:35:56 crc kubenswrapper[4961]: I1205 17:35:56.021074 4961 patch_prober.go:28] interesting pod/console-f9d7485db-sp65b container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: connect: connection refused" start-of-body= Dec 05 17:35:56 crc kubenswrapper[4961]: I1205 17:35:56.021424 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-sp65b" podUID="517806cd-8b4c-4e6c-9edf-7dbcd125e0ee" containerName="console" probeResult="failure" output="Get \"https://10.217.0.20:8443/health\": dial tcp 10.217.0.20:8443: connect: connection refused" Dec 05 17:35:56 crc kubenswrapper[4961]: I1205 17:35:56.407754 4961 patch_prober.go:28] interesting pod/router-default-5444994796-nksfl container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Dec 05 17:35:56 crc kubenswrapper[4961]: [-]has-synced failed: reason withheld Dec 05 17:35:56 crc kubenswrapper[4961]: [+]process-running ok Dec 05 17:35:56 crc kubenswrapper[4961]: healthz check failed Dec 05 17:35:56 crc kubenswrapper[4961]: I1205 17:35:56.407881 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-nksfl" podUID="84d294ab-bfe4-4ec2-ba8a-6a5540d570b8" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 
500" Dec 05 17:35:57 crc kubenswrapper[4961]: I1205 17:35:57.246241 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:35:57 crc kubenswrapper[4961]: I1205 17:35:57.246308 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:35:57 crc kubenswrapper[4961]: I1205 17:35:57.534577 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-nksfl" Dec 05 17:35:57 crc kubenswrapper[4961]: I1205 17:35:57.537214 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-nksfl" Dec 05 17:35:57 crc kubenswrapper[4961]: I1205 17:35:57.564841 4961 patch_prober.go:28] interesting pod/downloads-7954f5f757-2msfx container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 05 17:35:57 crc kubenswrapper[4961]: I1205 17:35:57.564912 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-2msfx" podUID="694a4dea-466c-48de-b828-dded0b4e3309" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 05 17:35:57 crc kubenswrapper[4961]: I1205 17:35:57.565147 4961 patch_prober.go:28] interesting pod/downloads-7954f5f757-2msfx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 05 17:35:57 crc kubenswrapper[4961]: I1205 17:35:57.565246 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2msfx" podUID="694a4dea-466c-48de-b828-dded0b4e3309" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 05 17:35:58 crc kubenswrapper[4961]: I1205 17:35:58.778557 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs\") pod \"network-metrics-daemon-pgc6p\" (UID: \"6f24429b-a57e-47d0-8354-87ff9d6bcee8\") " pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:35:58 crc kubenswrapper[4961]: I1205 17:35:58.800245 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/6f24429b-a57e-47d0-8354-87ff9d6bcee8-metrics-certs\") pod \"network-metrics-daemon-pgc6p\" (UID: \"6f24429b-a57e-47d0-8354-87ff9d6bcee8\") " pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:35:58 crc kubenswrapper[4961]: I1205 17:35:58.814466 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-pgc6p" Dec 05 17:36:05 crc kubenswrapper[4961]: I1205 17:36:05.874064 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 17:36:05 crc kubenswrapper[4961]: I1205 17:36:05.968475 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fee593cf-ca69-4de6-b453-a727ffe508b7-kube-api-access\") pod \"fee593cf-ca69-4de6-b453-a727ffe508b7\" (UID: \"fee593cf-ca69-4de6-b453-a727ffe508b7\") " Dec 05 17:36:05 crc kubenswrapper[4961]: I1205 17:36:05.968534 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fee593cf-ca69-4de6-b453-a727ffe508b7-kubelet-dir\") pod \"fee593cf-ca69-4de6-b453-a727ffe508b7\" (UID: \"fee593cf-ca69-4de6-b453-a727ffe508b7\") " Dec 05 17:36:05 crc kubenswrapper[4961]: I1205 17:36:05.969901 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fee593cf-ca69-4de6-b453-a727ffe508b7-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "fee593cf-ca69-4de6-b453-a727ffe508b7" (UID: "fee593cf-ca69-4de6-b453-a727ffe508b7"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:36:05 crc kubenswrapper[4961]: I1205 17:36:05.978483 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fee593cf-ca69-4de6-b453-a727ffe508b7-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "fee593cf-ca69-4de6-b453-a727ffe508b7" (UID: "fee593cf-ca69-4de6-b453-a727ffe508b7"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:36:06 crc kubenswrapper[4961]: I1205 17:36:06.008504 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-sp65b" Dec 05 17:36:06 crc kubenswrapper[4961]: I1205 17:36:06.041289 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-sp65b" Dec 05 17:36:06 crc kubenswrapper[4961]: I1205 17:36:06.075272 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/fee593cf-ca69-4de6-b453-a727ffe508b7-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:06 crc kubenswrapper[4961]: I1205 17:36:06.075323 4961 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/fee593cf-ca69-4de6-b453-a727ffe508b7-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:06 crc kubenswrapper[4961]: I1205 17:36:06.175795 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Dec 05 17:36:06 crc kubenswrapper[4961]: I1205 17:36:06.175813 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"fee593cf-ca69-4de6-b453-a727ffe508b7","Type":"ContainerDied","Data":"a34600f29d48b647b974915b107f39bf08acbcebcc037066c30e2456ee2788dc"} Dec 05 17:36:06 crc kubenswrapper[4961]: I1205 17:36:06.175885 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a34600f29d48b647b974915b107f39bf08acbcebcc037066c30e2456ee2788dc" Dec 05 17:36:06 crc kubenswrapper[4961]: I1205 17:36:06.272949 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" Dec 05 17:36:07 crc kubenswrapper[4961]: I1205 17:36:07.565769 4961 patch_prober.go:28] interesting pod/downloads-7954f5f757-2msfx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 05 17:36:07 crc kubenswrapper[4961]: I1205 17:36:07.566100 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2msfx" podUID="694a4dea-466c-48de-b828-dded0b4e3309" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 05 17:36:07 crc kubenswrapper[4961]: I1205 17:36:07.566672 4961 patch_prober.go:28] interesting pod/downloads-7954f5f757-2msfx container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 05 17:36:07 crc kubenswrapper[4961]: I1205 17:36:07.566728 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-2msfx" podUID="694a4dea-466c-48de-b828-dded0b4e3309" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 05 17:36:07 crc kubenswrapper[4961]: I1205 17:36:07.566803 4961 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-2msfx" Dec 05 17:36:07 crc kubenswrapper[4961]: I1205 17:36:07.567657 4961 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"5c3d9317c8d497d6c64d27d4715889bd0be1d0b28b899b6080c59d7812916be4"} pod="openshift-console/downloads-7954f5f757-2msfx" containerMessage="Container download-server failed liveness probe, will be restarted" Dec 05 17:36:07 crc kubenswrapper[4961]: I1205 17:36:07.567743 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-2msfx" podUID="694a4dea-466c-48de-b828-dded0b4e3309" containerName="download-server" containerID="cri-o://5c3d9317c8d497d6c64d27d4715889bd0be1d0b28b899b6080c59d7812916be4" gracePeriod=2 Dec 05 17:36:07 crc kubenswrapper[4961]: I1205 17:36:07.568431 4961 patch_prober.go:28] interesting pod/downloads-7954f5f757-2msfx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 05 17:36:07 crc kubenswrapper[4961]: I1205 
17:36:07.568475 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2msfx" podUID="694a4dea-466c-48de-b828-dded0b4e3309" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 05 17:36:08 crc kubenswrapper[4961]: I1205 17:36:08.194124 4961 generic.go:334] "Generic (PLEG): container finished" podID="694a4dea-466c-48de-b828-dded0b4e3309" containerID="5c3d9317c8d497d6c64d27d4715889bd0be1d0b28b899b6080c59d7812916be4" exitCode=0 Dec 05 17:36:08 crc kubenswrapper[4961]: I1205 17:36:08.194174 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-2msfx" event={"ID":"694a4dea-466c-48de-b828-dded0b4e3309","Type":"ContainerDied","Data":"5c3d9317c8d497d6c64d27d4715889bd0be1d0b28b899b6080c59d7812916be4"} Dec 05 17:36:17 crc kubenswrapper[4961]: I1205 17:36:17.562175 4961 patch_prober.go:28] interesting pod/downloads-7954f5f757-2msfx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 05 17:36:17 crc kubenswrapper[4961]: I1205 17:36:17.563116 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2msfx" podUID="694a4dea-466c-48de-b828-dded0b4e3309" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 05 17:36:17 crc kubenswrapper[4961]: I1205 17:36:17.797462 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-6fzkv" Dec 05 17:36:23 crc kubenswrapper[4961]: I1205 17:36:23.039983 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Dec 05 17:36:24 crc kubenswrapper[4961]: I1205 17:36:24.698880 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 17:36:24 crc kubenswrapper[4961]: E1205 17:36:24.699134 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fee593cf-ca69-4de6-b453-a727ffe508b7" containerName="pruner" Dec 05 17:36:24 crc kubenswrapper[4961]: I1205 17:36:24.699154 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="fee593cf-ca69-4de6-b453-a727ffe508b7" containerName="pruner" Dec 05 17:36:24 crc kubenswrapper[4961]: E1205 17:36:24.699172 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f99802e1-193a-41c4-9e99-6012f6063111" containerName="pruner" Dec 05 17:36:24 crc kubenswrapper[4961]: I1205 17:36:24.699178 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="f99802e1-193a-41c4-9e99-6012f6063111" containerName="pruner" Dec 05 17:36:24 crc kubenswrapper[4961]: I1205 17:36:24.699287 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="fee593cf-ca69-4de6-b453-a727ffe508b7" containerName="pruner" Dec 05 17:36:24 crc kubenswrapper[4961]: I1205 17:36:24.699301 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="f99802e1-193a-41c4-9e99-6012f6063111" containerName="pruner" Dec 05 17:36:24 crc kubenswrapper[4961]: I1205 17:36:24.699653 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 17:36:24 crc kubenswrapper[4961]: I1205 17:36:24.701808 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Dec 05 17:36:24 crc kubenswrapper[4961]: I1205 17:36:24.701828 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Dec 05 17:36:24 crc kubenswrapper[4961]: I1205 17:36:24.714638 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 17:36:24 crc kubenswrapper[4961]: I1205 17:36:24.790499 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8fb7aef8-279c-4b6c-a9f7-916892f33c5a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8fb7aef8-279c-4b6c-a9f7-916892f33c5a\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 17:36:24 crc kubenswrapper[4961]: I1205 17:36:24.790559 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8fb7aef8-279c-4b6c-a9f7-916892f33c5a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8fb7aef8-279c-4b6c-a9f7-916892f33c5a\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 17:36:24 crc kubenswrapper[4961]: I1205 17:36:24.891575 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8fb7aef8-279c-4b6c-a9f7-916892f33c5a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8fb7aef8-279c-4b6c-a9f7-916892f33c5a\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 17:36:24 crc kubenswrapper[4961]: I1205 17:36:24.891704 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8fb7aef8-279c-4b6c-a9f7-916892f33c5a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8fb7aef8-279c-4b6c-a9f7-916892f33c5a\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 17:36:24 crc kubenswrapper[4961]: I1205 17:36:24.892086 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8fb7aef8-279c-4b6c-a9f7-916892f33c5a-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"8fb7aef8-279c-4b6c-a9f7-916892f33c5a\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 17:36:24 crc kubenswrapper[4961]: I1205 17:36:24.916896 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8fb7aef8-279c-4b6c-a9f7-916892f33c5a-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"8fb7aef8-279c-4b6c-a9f7-916892f33c5a\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 17:36:25 crc kubenswrapper[4961]: I1205 17:36:25.017995 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 17:36:25 crc kubenswrapper[4961]: E1205 17:36:25.675386 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 05 17:36:25 crc kubenswrapper[4961]: E1205 17:36:25.675999 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qbz4g,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-j4wq7_openshift-marketplace(0b4e16a7-3656-4a7f-b0a9-4a8c5210d292): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 17:36:25 crc kubenswrapper[4961]: E1205 17:36:25.677230 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-j4wq7" podUID="0b4e16a7-3656-4a7f-b0a9-4a8c5210d292" Dec 05 17:36:27 crc kubenswrapper[4961]: I1205 17:36:27.245225 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:36:27 crc kubenswrapper[4961]: I1205 17:36:27.245544 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:36:27 crc kubenswrapper[4961]: I1205 17:36:27.563350 4961 patch_prober.go:28] interesting 
pod/downloads-7954f5f757-2msfx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 05 17:36:27 crc kubenswrapper[4961]: I1205 17:36:27.563444 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2msfx" podUID="694a4dea-466c-48de-b828-dded0b4e3309" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 05 17:36:28 crc kubenswrapper[4961]: E1205 17:36:28.298484 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-j4wq7" podUID="0b4e16a7-3656-4a7f-b0a9-4a8c5210d292" Dec 05 17:36:28 crc kubenswrapper[4961]: E1205 17:36:28.696942 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 05 17:36:28 crc kubenswrapper[4961]: E1205 17:36:28.697105 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-6b877,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-c5wk7_openshift-marketplace(eb4981fe-4b26-4626-8c45-ba311dc825d9): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 17:36:28 crc kubenswrapper[4961]: E1205 17:36:28.698273 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" 
pod="openshift-marketplace/certified-operators-c5wk7" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" Dec 05 17:36:29 crc kubenswrapper[4961]: I1205 17:36:29.711179 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 17:36:29 crc kubenswrapper[4961]: I1205 17:36:29.712437 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 17:36:29 crc kubenswrapper[4961]: I1205 17:36:29.712530 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 17:36:29 crc kubenswrapper[4961]: I1205 17:36:29.758514 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/db9da7f5-eb95-470f-8f02-55ad79822cb5-kube-api-access\") pod \"installer-9-crc\" (UID: \"db9da7f5-eb95-470f-8f02-55ad79822cb5\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 17:36:29 crc kubenswrapper[4961]: I1205 17:36:29.758553 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/db9da7f5-eb95-470f-8f02-55ad79822cb5-kubelet-dir\") pod \"installer-9-crc\" (UID: \"db9da7f5-eb95-470f-8f02-55ad79822cb5\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 17:36:29 crc kubenswrapper[4961]: I1205 17:36:29.758602 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/db9da7f5-eb95-470f-8f02-55ad79822cb5-var-lock\") pod \"installer-9-crc\" (UID: \"db9da7f5-eb95-470f-8f02-55ad79822cb5\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 17:36:29 crc kubenswrapper[4961]: I1205 17:36:29.860245 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/db9da7f5-eb95-470f-8f02-55ad79822cb5-kube-api-access\") pod \"installer-9-crc\" (UID: \"db9da7f5-eb95-470f-8f02-55ad79822cb5\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 17:36:29 crc kubenswrapper[4961]: I1205 17:36:29.860315 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/db9da7f5-eb95-470f-8f02-55ad79822cb5-kubelet-dir\") pod \"installer-9-crc\" (UID: \"db9da7f5-eb95-470f-8f02-55ad79822cb5\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 17:36:29 crc kubenswrapper[4961]: I1205 17:36:29.860380 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/db9da7f5-eb95-470f-8f02-55ad79822cb5-var-lock\") pod \"installer-9-crc\" (UID: \"db9da7f5-eb95-470f-8f02-55ad79822cb5\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 17:36:29 crc kubenswrapper[4961]: I1205 17:36:29.860510 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/db9da7f5-eb95-470f-8f02-55ad79822cb5-kubelet-dir\") pod \"installer-9-crc\" (UID: \"db9da7f5-eb95-470f-8f02-55ad79822cb5\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 17:36:29 crc kubenswrapper[4961]: I1205 17:36:29.860556 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/db9da7f5-eb95-470f-8f02-55ad79822cb5-var-lock\") pod \"installer-9-crc\" (UID: 
\"db9da7f5-eb95-470f-8f02-55ad79822cb5\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 17:36:29 crc kubenswrapper[4961]: I1205 17:36:29.882957 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/db9da7f5-eb95-470f-8f02-55ad79822cb5-kube-api-access\") pod \"installer-9-crc\" (UID: \"db9da7f5-eb95-470f-8f02-55ad79822cb5\") " pod="openshift-kube-apiserver/installer-9-crc" Dec 05 17:36:30 crc kubenswrapper[4961]: I1205 17:36:30.049370 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 17:36:30 crc kubenswrapper[4961]: E1205 17:36:30.699449 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Dec 05 17:36:30 crc kubenswrapper[4961]: E1205 17:36:30.699622 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-xg48n,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-2djc5_openshift-marketplace(39220eba-93d5-4e82-89a9-97d1383522a4): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 17:36:30 crc kubenswrapper[4961]: E1205 17:36:30.700911 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-2djc5" podUID="39220eba-93d5-4e82-89a9-97d1383522a4" Dec 05 17:36:31 crc kubenswrapper[4961]: E1205 17:36:31.893051 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image 
\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-c5wk7" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" Dec 05 17:36:31 crc kubenswrapper[4961]: E1205 17:36:31.893182 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-2djc5" podUID="39220eba-93d5-4e82-89a9-97d1383522a4" Dec 05 17:36:31 crc kubenswrapper[4961]: E1205 17:36:31.963811 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 05 17:36:31 crc kubenswrapper[4961]: E1205 17:36:31.963992 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7jjb9,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-4rgn2_openshift-marketplace(81f0c1f1-bb8e-4ae4-a87b-460ec2f12258): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 17:36:31 crc kubenswrapper[4961]: E1205 17:36:31.965198 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-4rgn2" podUID="81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" Dec 05 17:36:31 crc kubenswrapper[4961]: E1205 17:36:31.984610 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Dec 05 17:36:31 crc kubenswrapper[4961]: E1205 17:36:31.984819 4961 
kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-crfnl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-2vgjf_openshift-marketplace(4ab60ade-3b80-4731-9fa7-09c77a0fa666): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 17:36:31 crc kubenswrapper[4961]: E1205 17:36:31.986013 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-2vgjf" podUID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" Dec 05 17:36:33 crc kubenswrapper[4961]: E1205 17:36:33.226000 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-2vgjf" podUID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" Dec 05 17:36:33 crc kubenswrapper[4961]: E1205 17:36:33.226000 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-4rgn2" podUID="81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" Dec 05 17:36:33 crc kubenswrapper[4961]: E1205 17:36:33.303544 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 05 17:36:33 crc kubenswrapper[4961]: E1205 17:36:33.303714 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-z7khz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-kgvwm_openshift-marketplace(6ae1ad4f-caa6-49b9-9d32-6905088903bf): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 17:36:33 crc kubenswrapper[4961]: E1205 17:36:33.304861 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-kgvwm" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" Dec 05 17:36:33 crc kubenswrapper[4961]: E1205 17:36:33.362296 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Dec 05 17:36:33 crc kubenswrapper[4961]: E1205 17:36:33.362908 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-45prm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-hdlff_openshift-marketplace(af3d9725-8492-4a3a-b125-a545b2d4c8c7): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 17:36:33 crc kubenswrapper[4961]: E1205 17:36:33.364964 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-hdlff" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" Dec 05 17:36:33 crc kubenswrapper[4961]: E1205 17:36:33.369696 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-kgvwm" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" Dec 05 17:36:33 crc kubenswrapper[4961]: E1205 17:36:33.387518 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Dec 05 17:36:33 crc kubenswrapper[4961]: E1205 17:36:33.387872 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8x959,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-7wrmp_openshift-marketplace(1a8bb3a1-6049-4a0e-8d86-3ddd148686b9): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Dec 05 17:36:33 crc kubenswrapper[4961]: E1205 17:36:33.389230 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-7wrmp" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" Dec 05 17:36:33 crc kubenswrapper[4961]: I1205 17:36:33.675883 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-pgc6p"] Dec 05 17:36:33 crc kubenswrapper[4961]: W1205 17:36:33.679954 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6f24429b_a57e_47d0_8354_87ff9d6bcee8.slice/crio-08a3924d4699dac5a424f8e03c7efb422c5acb190703d2fd0382b65f9c8a3ef7 WatchSource:0}: Error finding container 08a3924d4699dac5a424f8e03c7efb422c5acb190703d2fd0382b65f9c8a3ef7: Status 404 returned error can't find the container with id 08a3924d4699dac5a424f8e03c7efb422c5acb190703d2fd0382b65f9c8a3ef7 Dec 05 17:36:33 crc kubenswrapper[4961]: I1205 17:36:33.776941 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Dec 05 17:36:33 crc kubenswrapper[4961]: I1205 17:36:33.779760 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Dec 05 17:36:33 crc kubenswrapper[4961]: W1205 17:36:33.796403 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-poddb9da7f5_eb95_470f_8f02_55ad79822cb5.slice/crio-5931a56c10a76928228cb59e934a278144aa808c780982b55483b05c40a4fef5 WatchSource:0}: Error finding container 5931a56c10a76928228cb59e934a278144aa808c780982b55483b05c40a4fef5: Status 404 returned error can't find the container with id 5931a56c10a76928228cb59e934a278144aa808c780982b55483b05c40a4fef5 Dec 05 17:36:33 crc 
kubenswrapper[4961]: W1205 17:36:33.803948 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod8fb7aef8_279c_4b6c_a9f7_916892f33c5a.slice/crio-689705a0ec67874e31d0f6f02dce2b8bc5b322ea130d8cf17a52c5bfd205e18e WatchSource:0}: Error finding container 689705a0ec67874e31d0f6f02dce2b8bc5b322ea130d8cf17a52c5bfd205e18e: Status 404 returned error can't find the container with id 689705a0ec67874e31d0f6f02dce2b8bc5b322ea130d8cf17a52c5bfd205e18e Dec 05 17:36:34 crc kubenswrapper[4961]: I1205 17:36:34.361636 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"db9da7f5-eb95-470f-8f02-55ad79822cb5","Type":"ContainerStarted","Data":"a66c3b51e3e34591008b47fa21b10876e91c057965f7e1a98343224583d4ea55"} Dec 05 17:36:34 crc kubenswrapper[4961]: I1205 17:36:34.362266 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"db9da7f5-eb95-470f-8f02-55ad79822cb5","Type":"ContainerStarted","Data":"5931a56c10a76928228cb59e934a278144aa808c780982b55483b05c40a4fef5"} Dec 05 17:36:34 crc kubenswrapper[4961]: I1205 17:36:34.364344 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"8fb7aef8-279c-4b6c-a9f7-916892f33c5a","Type":"ContainerStarted","Data":"c5c3f81c73e147d375888068ea223d588c41c5c222ba23a8bdd68478dc8d06a4"} Dec 05 17:36:34 crc kubenswrapper[4961]: I1205 17:36:34.364388 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"8fb7aef8-279c-4b6c-a9f7-916892f33c5a","Type":"ContainerStarted","Data":"689705a0ec67874e31d0f6f02dce2b8bc5b322ea130d8cf17a52c5bfd205e18e"} Dec 05 17:36:34 crc kubenswrapper[4961]: I1205 17:36:34.367522 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-pgc6p" event={"ID":"6f24429b-a57e-47d0-8354-87ff9d6bcee8","Type":"ContainerStarted","Data":"5fd070bef5b6e81b6b4dd36130b6f1fcc21eb4351f268d4b0303beafa85b57c1"} Dec 05 17:36:34 crc kubenswrapper[4961]: I1205 17:36:34.367566 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-pgc6p" event={"ID":"6f24429b-a57e-47d0-8354-87ff9d6bcee8","Type":"ContainerStarted","Data":"9e3fe27c5a81ec5d22737a2e51680395ccd5dc90db9061ca347dfd1b7ade5637"} Dec 05 17:36:34 crc kubenswrapper[4961]: I1205 17:36:34.367579 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-pgc6p" event={"ID":"6f24429b-a57e-47d0-8354-87ff9d6bcee8","Type":"ContainerStarted","Data":"08a3924d4699dac5a424f8e03c7efb422c5acb190703d2fd0382b65f9c8a3ef7"} Dec 05 17:36:34 crc kubenswrapper[4961]: I1205 17:36:34.369456 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-2msfx" event={"ID":"694a4dea-466c-48de-b828-dded0b4e3309","Type":"ContainerStarted","Data":"79e514106f7a5ad984f2a55b31e315cc49a554fb0d6edd943dec2a94f690b4ad"} Dec 05 17:36:34 crc kubenswrapper[4961]: I1205 17:36:34.370139 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-2msfx" Dec 05 17:36:34 crc kubenswrapper[4961]: I1205 17:36:34.370328 4961 patch_prober.go:28] interesting pod/downloads-7954f5f757-2msfx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" 
start-of-body= Dec 05 17:36:34 crc kubenswrapper[4961]: I1205 17:36:34.370370 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2msfx" podUID="694a4dea-466c-48de-b828-dded0b4e3309" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 05 17:36:34 crc kubenswrapper[4961]: E1205 17:36:34.372233 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-7wrmp" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" Dec 05 17:36:34 crc kubenswrapper[4961]: E1205 17:36:34.372237 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-hdlff" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" Dec 05 17:36:34 crc kubenswrapper[4961]: I1205 17:36:34.381237 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=5.381222086 podStartE2EDuration="5.381222086s" podCreationTimestamp="2025-12-05 17:36:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:36:34.377855787 +0000 UTC m=+200.439006280" watchObservedRunningTime="2025-12-05 17:36:34.381222086 +0000 UTC m=+200.442372559" Dec 05 17:36:34 crc kubenswrapper[4961]: I1205 17:36:34.417089 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-pgc6p" podStartSLOduration=178.417069968 podStartE2EDuration="2m58.417069968s" podCreationTimestamp="2025-12-05 17:33:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:36:34.415950121 +0000 UTC m=+200.477100614" watchObservedRunningTime="2025-12-05 17:36:34.417069968 +0000 UTC m=+200.478220441" Dec 05 17:36:34 crc kubenswrapper[4961]: I1205 17:36:34.465984 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=10.465957088 podStartE2EDuration="10.465957088s" podCreationTimestamp="2025-12-05 17:36:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:36:34.457680442 +0000 UTC m=+200.518830915" watchObservedRunningTime="2025-12-05 17:36:34.465957088 +0000 UTC m=+200.527107561" Dec 05 17:36:35 crc kubenswrapper[4961]: I1205 17:36:35.378095 4961 generic.go:334] "Generic (PLEG): container finished" podID="8fb7aef8-279c-4b6c-a9f7-916892f33c5a" containerID="c5c3f81c73e147d375888068ea223d588c41c5c222ba23a8bdd68478dc8d06a4" exitCode=0 Dec 05 17:36:35 crc kubenswrapper[4961]: I1205 17:36:35.378157 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"8fb7aef8-279c-4b6c-a9f7-916892f33c5a","Type":"ContainerDied","Data":"c5c3f81c73e147d375888068ea223d588c41c5c222ba23a8bdd68478dc8d06a4"} Dec 05 17:36:35 crc kubenswrapper[4961]: I1205 17:36:35.379832 4961 
patch_prober.go:28] interesting pod/downloads-7954f5f757-2msfx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 05 17:36:35 crc kubenswrapper[4961]: I1205 17:36:35.379920 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2msfx" podUID="694a4dea-466c-48de-b828-dded0b4e3309" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 05 17:36:36 crc kubenswrapper[4961]: I1205 17:36:36.682672 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 17:36:36 crc kubenswrapper[4961]: I1205 17:36:36.756955 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8fb7aef8-279c-4b6c-a9f7-916892f33c5a-kube-api-access\") pod \"8fb7aef8-279c-4b6c-a9f7-916892f33c5a\" (UID: \"8fb7aef8-279c-4b6c-a9f7-916892f33c5a\") " Dec 05 17:36:36 crc kubenswrapper[4961]: I1205 17:36:36.757146 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8fb7aef8-279c-4b6c-a9f7-916892f33c5a-kubelet-dir\") pod \"8fb7aef8-279c-4b6c-a9f7-916892f33c5a\" (UID: \"8fb7aef8-279c-4b6c-a9f7-916892f33c5a\") " Dec 05 17:36:36 crc kubenswrapper[4961]: I1205 17:36:36.757485 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8fb7aef8-279c-4b6c-a9f7-916892f33c5a-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "8fb7aef8-279c-4b6c-a9f7-916892f33c5a" (UID: "8fb7aef8-279c-4b6c-a9f7-916892f33c5a"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:36:36 crc kubenswrapper[4961]: I1205 17:36:36.764191 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8fb7aef8-279c-4b6c-a9f7-916892f33c5a-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "8fb7aef8-279c-4b6c-a9f7-916892f33c5a" (UID: "8fb7aef8-279c-4b6c-a9f7-916892f33c5a"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:36:36 crc kubenswrapper[4961]: I1205 17:36:36.858862 4961 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/8fb7aef8-279c-4b6c-a9f7-916892f33c5a-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:36 crc kubenswrapper[4961]: I1205 17:36:36.858900 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/8fb7aef8-279c-4b6c-a9f7-916892f33c5a-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:37 crc kubenswrapper[4961]: I1205 17:36:37.391268 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"8fb7aef8-279c-4b6c-a9f7-916892f33c5a","Type":"ContainerDied","Data":"689705a0ec67874e31d0f6f02dce2b8bc5b322ea130d8cf17a52c5bfd205e18e"} Dec 05 17:36:37 crc kubenswrapper[4961]: I1205 17:36:37.391518 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="689705a0ec67874e31d0f6f02dce2b8bc5b322ea130d8cf17a52c5bfd205e18e" Dec 05 17:36:37 crc kubenswrapper[4961]: I1205 17:36:37.391307 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Dec 05 17:36:37 crc kubenswrapper[4961]: I1205 17:36:37.561289 4961 patch_prober.go:28] interesting pod/downloads-7954f5f757-2msfx container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 05 17:36:37 crc kubenswrapper[4961]: I1205 17:36:37.561358 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-2msfx" podUID="694a4dea-466c-48de-b828-dded0b4e3309" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 05 17:36:37 crc kubenswrapper[4961]: I1205 17:36:37.561296 4961 patch_prober.go:28] interesting pod/downloads-7954f5f757-2msfx container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" start-of-body= Dec 05 17:36:37 crc kubenswrapper[4961]: I1205 17:36:37.561651 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2msfx" podUID="694a4dea-466c-48de-b828-dded0b4e3309" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.14:8080/\": dial tcp 10.217.0.14:8080: connect: connection refused" Dec 05 17:36:44 crc kubenswrapper[4961]: I1205 17:36:44.435867 4961 generic.go:334] "Generic (PLEG): container finished" podID="0b4e16a7-3656-4a7f-b0a9-4a8c5210d292" containerID="242f80cb3e436632876cfe15cbd30d15f87a162d43d64c8f8aad8a68db8401f8" exitCode=0 Dec 05 17:36:44 crc kubenswrapper[4961]: I1205 17:36:44.435949 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j4wq7" event={"ID":"0b4e16a7-3656-4a7f-b0a9-4a8c5210d292","Type":"ContainerDied","Data":"242f80cb3e436632876cfe15cbd30d15f87a162d43d64c8f8aad8a68db8401f8"} Dec 05 17:36:45 crc kubenswrapper[4961]: I1205 17:36:45.440976 4961 generic.go:334] "Generic (PLEG): container finished" podID="39220eba-93d5-4e82-89a9-97d1383522a4" containerID="7e1b8f2ecc9d2ace40403ca0f859bd27bbc1f24dcdf7861e3b41d59b18fa6b0a" exitCode=0 
Dec 05 17:36:45 crc kubenswrapper[4961]: I1205 17:36:45.441035 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2djc5" event={"ID":"39220eba-93d5-4e82-89a9-97d1383522a4","Type":"ContainerDied","Data":"7e1b8f2ecc9d2ace40403ca0f859bd27bbc1f24dcdf7861e3b41d59b18fa6b0a"} Dec 05 17:36:45 crc kubenswrapper[4961]: I1205 17:36:45.442966 4961 generic.go:334] "Generic (PLEG): container finished" podID="eb4981fe-4b26-4626-8c45-ba311dc825d9" containerID="93a1ebbd31bc59273eba0da9e2415250afdb5c1d0a2e6cfa6046c8af5295d094" exitCode=0 Dec 05 17:36:45 crc kubenswrapper[4961]: I1205 17:36:45.443011 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c5wk7" event={"ID":"eb4981fe-4b26-4626-8c45-ba311dc825d9","Type":"ContainerDied","Data":"93a1ebbd31bc59273eba0da9e2415250afdb5c1d0a2e6cfa6046c8af5295d094"} Dec 05 17:36:47 crc kubenswrapper[4961]: I1205 17:36:47.567516 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-2msfx" Dec 05 17:36:49 crc kubenswrapper[4961]: I1205 17:36:49.467528 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j4wq7" event={"ID":"0b4e16a7-3656-4a7f-b0a9-4a8c5210d292","Type":"ContainerStarted","Data":"00cfd59f70f98ac728eeb6e4c2684cccfa400aa4756d1c7297314af4ed3e4328"} Dec 05 17:36:49 crc kubenswrapper[4961]: I1205 17:36:49.485170 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-j4wq7" podStartSLOduration=3.609024883 podStartE2EDuration="1m5.485146949s" podCreationTimestamp="2025-12-05 17:35:44 +0000 UTC" firstStartedPulling="2025-12-05 17:35:46.649353485 +0000 UTC m=+152.710503958" lastFinishedPulling="2025-12-05 17:36:48.525475551 +0000 UTC m=+214.586626024" observedRunningTime="2025-12-05 17:36:49.484648628 +0000 UTC m=+215.545799111" watchObservedRunningTime="2025-12-05 17:36:49.485146949 +0000 UTC m=+215.546297422" Dec 05 17:36:54 crc kubenswrapper[4961]: I1205 17:36:54.997286 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-j4wq7" Dec 05 17:36:54 crc kubenswrapper[4961]: I1205 17:36:54.997628 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-j4wq7" Dec 05 17:36:55 crc kubenswrapper[4961]: I1205 17:36:55.247098 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-j4wq7" Dec 05 17:36:55 crc kubenswrapper[4961]: I1205 17:36:55.536707 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-j4wq7" Dec 05 17:36:55 crc kubenswrapper[4961]: I1205 17:36:55.805146 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j4wq7"] Dec 05 17:36:57 crc kubenswrapper[4961]: I1205 17:36:57.246232 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:36:57 crc kubenswrapper[4961]: I1205 17:36:57.246295 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" 
podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:36:57 crc kubenswrapper[4961]: I1205 17:36:57.246343 4961 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" Dec 05 17:36:57 crc kubenswrapper[4961]: I1205 17:36:57.247015 4961 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8"} pod="openshift-machine-config-operator/machine-config-daemon-4vc27" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 17:36:57 crc kubenswrapper[4961]: I1205 17:36:57.247083 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" containerID="cri-o://384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8" gracePeriod=600 Dec 05 17:36:57 crc kubenswrapper[4961]: I1205 17:36:57.504593 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-j4wq7" podUID="0b4e16a7-3656-4a7f-b0a9-4a8c5210d292" containerName="registry-server" containerID="cri-o://00cfd59f70f98ac728eeb6e4c2684cccfa400aa4756d1c7297314af4ed3e4328" gracePeriod=2 Dec 05 17:36:58 crc kubenswrapper[4961]: I1205 17:36:58.510957 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2djc5" event={"ID":"39220eba-93d5-4e82-89a9-97d1383522a4","Type":"ContainerStarted","Data":"c834e2400f8ee4f4d3dd207d99068eca1f2bddb016239a33514fc3a0b746e3c0"} Dec 05 17:36:58 crc kubenswrapper[4961]: I1205 17:36:58.526156 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2djc5" podStartSLOduration=11.219642998 podStartE2EDuration="1m14.526140984s" podCreationTimestamp="2025-12-05 17:35:44 +0000 UTC" firstStartedPulling="2025-12-05 17:35:46.706499182 +0000 UTC m=+152.767649655" lastFinishedPulling="2025-12-05 17:36:50.012997168 +0000 UTC m=+216.074147641" observedRunningTime="2025-12-05 17:36:58.524890404 +0000 UTC m=+224.586040897" watchObservedRunningTime="2025-12-05 17:36:58.526140984 +0000 UTC m=+224.587291457" Dec 05 17:36:59 crc kubenswrapper[4961]: I1205 17:36:59.519192 4961 generic.go:334] "Generic (PLEG): container finished" podID="c048c267-061b-479b-9d63-b3aee093d9f6" containerID="384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8" exitCode=0 Dec 05 17:36:59 crc kubenswrapper[4961]: I1205 17:36:59.519329 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerDied","Data":"384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8"} Dec 05 17:36:59 crc kubenswrapper[4961]: I1205 17:36:59.522239 4961 generic.go:334] "Generic (PLEG): container finished" podID="0b4e16a7-3656-4a7f-b0a9-4a8c5210d292" containerID="00cfd59f70f98ac728eeb6e4c2684cccfa400aa4756d1c7297314af4ed3e4328" exitCode=0 Dec 05 17:36:59 crc kubenswrapper[4961]: I1205 17:36:59.522275 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-j4wq7" event={"ID":"0b4e16a7-3656-4a7f-b0a9-4a8c5210d292","Type":"ContainerDied","Data":"00cfd59f70f98ac728eeb6e4c2684cccfa400aa4756d1c7297314af4ed3e4328"} Dec 05 17:36:59 crc kubenswrapper[4961]: I1205 17:36:59.522296 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-j4wq7" event={"ID":"0b4e16a7-3656-4a7f-b0a9-4a8c5210d292","Type":"ContainerDied","Data":"539d8c02ed3ee3c4e95df113e0022abeb7fe3fe9c8507b031887a47955409a80"} Dec 05 17:36:59 crc kubenswrapper[4961]: I1205 17:36:59.522309 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="539d8c02ed3ee3c4e95df113e0022abeb7fe3fe9c8507b031887a47955409a80" Dec 05 17:36:59 crc kubenswrapper[4961]: I1205 17:36:59.601282 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j4wq7" Dec 05 17:36:59 crc kubenswrapper[4961]: I1205 17:36:59.660050 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b4e16a7-3656-4a7f-b0a9-4a8c5210d292-utilities\") pod \"0b4e16a7-3656-4a7f-b0a9-4a8c5210d292\" (UID: \"0b4e16a7-3656-4a7f-b0a9-4a8c5210d292\") " Dec 05 17:36:59 crc kubenswrapper[4961]: I1205 17:36:59.660163 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbz4g\" (UniqueName: \"kubernetes.io/projected/0b4e16a7-3656-4a7f-b0a9-4a8c5210d292-kube-api-access-qbz4g\") pod \"0b4e16a7-3656-4a7f-b0a9-4a8c5210d292\" (UID: \"0b4e16a7-3656-4a7f-b0a9-4a8c5210d292\") " Dec 05 17:36:59 crc kubenswrapper[4961]: I1205 17:36:59.661329 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b4e16a7-3656-4a7f-b0a9-4a8c5210d292-utilities" (OuterVolumeSpecName: "utilities") pod "0b4e16a7-3656-4a7f-b0a9-4a8c5210d292" (UID: "0b4e16a7-3656-4a7f-b0a9-4a8c5210d292"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:36:59 crc kubenswrapper[4961]: I1205 17:36:59.662033 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b4e16a7-3656-4a7f-b0a9-4a8c5210d292-catalog-content\") pod \"0b4e16a7-3656-4a7f-b0a9-4a8c5210d292\" (UID: \"0b4e16a7-3656-4a7f-b0a9-4a8c5210d292\") " Dec 05 17:36:59 crc kubenswrapper[4961]: I1205 17:36:59.663023 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0b4e16a7-3656-4a7f-b0a9-4a8c5210d292-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:59 crc kubenswrapper[4961]: I1205 17:36:59.667727 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b4e16a7-3656-4a7f-b0a9-4a8c5210d292-kube-api-access-qbz4g" (OuterVolumeSpecName: "kube-api-access-qbz4g") pod "0b4e16a7-3656-4a7f-b0a9-4a8c5210d292" (UID: "0b4e16a7-3656-4a7f-b0a9-4a8c5210d292"). InnerVolumeSpecName "kube-api-access-qbz4g". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:36:59 crc kubenswrapper[4961]: I1205 17:36:59.681221 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0b4e16a7-3656-4a7f-b0a9-4a8c5210d292-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0b4e16a7-3656-4a7f-b0a9-4a8c5210d292" (UID: "0b4e16a7-3656-4a7f-b0a9-4a8c5210d292"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:36:59 crc kubenswrapper[4961]: I1205 17:36:59.763914 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbz4g\" (UniqueName: \"kubernetes.io/projected/0b4e16a7-3656-4a7f-b0a9-4a8c5210d292-kube-api-access-qbz4g\") on node \"crc\" DevicePath \"\"" Dec 05 17:36:59 crc kubenswrapper[4961]: I1205 17:36:59.764246 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0b4e16a7-3656-4a7f-b0a9-4a8c5210d292-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:00 crc kubenswrapper[4961]: I1205 17:37:00.531268 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7wrmp" event={"ID":"1a8bb3a1-6049-4a0e-8d86-3ddd148686b9","Type":"ContainerStarted","Data":"99da141428358bbd14e7287820d44d32e97a23b15949f57b49553b81dbdc34c0"} Dec 05 17:37:00 crc kubenswrapper[4961]: I1205 17:37:00.534205 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c5wk7" event={"ID":"eb4981fe-4b26-4626-8c45-ba311dc825d9","Type":"ContainerStarted","Data":"39c08015361db10f9eb9157a42e13952456652ef0a34141c20ec0420471e72b6"} Dec 05 17:37:00 crc kubenswrapper[4961]: I1205 17:37:00.536717 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2vgjf" event={"ID":"4ab60ade-3b80-4731-9fa7-09c77a0fa666","Type":"ContainerStarted","Data":"a1ad30ed9db60fe6e0dfa11f343cfacbdd2dc50c169d83ef664339c7994ef8e2"} Dec 05 17:37:00 crc kubenswrapper[4961]: I1205 17:37:00.540157 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kgvwm" event={"ID":"6ae1ad4f-caa6-49b9-9d32-6905088903bf","Type":"ContainerStarted","Data":"57cc74eed4e87ecbf9382404a061301a4db36292cfe05c048c1e834570847d34"} Dec 05 17:37:00 crc kubenswrapper[4961]: I1205 17:37:00.543901 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerStarted","Data":"ca99d8e70567623b8c5ad00a1a0c15543d5fe98ccba8ac7da75e4ab98c660364"} Dec 05 17:37:00 crc kubenswrapper[4961]: I1205 17:37:00.547118 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hdlff" event={"ID":"af3d9725-8492-4a3a-b125-a545b2d4c8c7","Type":"ContainerStarted","Data":"5b1983c716ff360bda4af8f00baedd8b64452ee0663836fb5e011c31884fa4ae"} Dec 05 17:37:00 crc kubenswrapper[4961]: I1205 17:37:00.549561 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4rgn2" event={"ID":"81f0c1f1-bb8e-4ae4-a87b-460ec2f12258","Type":"ContainerStarted","Data":"c1de3f84c56bfe9648aa706574eef6a834c05b7f89b07ac889357628ab122c68"} Dec 05 17:37:00 crc kubenswrapper[4961]: I1205 17:37:00.549598 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-j4wq7" Dec 05 17:37:00 crc kubenswrapper[4961]: I1205 17:37:00.661198 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-c5wk7" podStartSLOduration=5.90887937 podStartE2EDuration="1m18.66117089s" podCreationTimestamp="2025-12-05 17:35:42 +0000 UTC" firstStartedPulling="2025-12-05 17:35:46.597710399 +0000 UTC m=+152.658860872" lastFinishedPulling="2025-12-05 17:36:59.350001899 +0000 UTC m=+225.411152392" observedRunningTime="2025-12-05 17:37:00.635666915 +0000 UTC m=+226.696817408" watchObservedRunningTime="2025-12-05 17:37:00.66117089 +0000 UTC m=+226.722321363" Dec 05 17:37:00 crc kubenswrapper[4961]: I1205 17:37:00.733028 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-j4wq7"] Dec 05 17:37:00 crc kubenswrapper[4961]: I1205 17:37:00.736952 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-j4wq7"] Dec 05 17:37:00 crc kubenswrapper[4961]: I1205 17:37:00.937811 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b4e16a7-3656-4a7f-b0a9-4a8c5210d292" path="/var/lib/kubelet/pods/0b4e16a7-3656-4a7f-b0a9-4a8c5210d292/volumes" Dec 05 17:37:01 crc kubenswrapper[4961]: I1205 17:37:01.557320 4961 generic.go:334] "Generic (PLEG): container finished" podID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" containerID="99da141428358bbd14e7287820d44d32e97a23b15949f57b49553b81dbdc34c0" exitCode=0 Dec 05 17:37:01 crc kubenswrapper[4961]: I1205 17:37:01.557380 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7wrmp" event={"ID":"1a8bb3a1-6049-4a0e-8d86-3ddd148686b9","Type":"ContainerDied","Data":"99da141428358bbd14e7287820d44d32e97a23b15949f57b49553b81dbdc34c0"} Dec 05 17:37:01 crc kubenswrapper[4961]: I1205 17:37:01.561183 4961 generic.go:334] "Generic (PLEG): container finished" podID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" containerID="a1ad30ed9db60fe6e0dfa11f343cfacbdd2dc50c169d83ef664339c7994ef8e2" exitCode=0 Dec 05 17:37:01 crc kubenswrapper[4961]: I1205 17:37:01.561238 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2vgjf" event={"ID":"4ab60ade-3b80-4731-9fa7-09c77a0fa666","Type":"ContainerDied","Data":"a1ad30ed9db60fe6e0dfa11f343cfacbdd2dc50c169d83ef664339c7994ef8e2"} Dec 05 17:37:01 crc kubenswrapper[4961]: I1205 17:37:01.568585 4961 generic.go:334] "Generic (PLEG): container finished" podID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" containerID="57cc74eed4e87ecbf9382404a061301a4db36292cfe05c048c1e834570847d34" exitCode=0 Dec 05 17:37:01 crc kubenswrapper[4961]: I1205 17:37:01.568744 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kgvwm" event={"ID":"6ae1ad4f-caa6-49b9-9d32-6905088903bf","Type":"ContainerDied","Data":"57cc74eed4e87ecbf9382404a061301a4db36292cfe05c048c1e834570847d34"} Dec 05 17:37:01 crc kubenswrapper[4961]: I1205 17:37:01.577076 4961 generic.go:334] "Generic (PLEG): container finished" podID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" containerID="5b1983c716ff360bda4af8f00baedd8b64452ee0663836fb5e011c31884fa4ae" exitCode=0 Dec 05 17:37:01 crc kubenswrapper[4961]: I1205 17:37:01.577164 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hdlff" 
event={"ID":"af3d9725-8492-4a3a-b125-a545b2d4c8c7","Type":"ContainerDied","Data":"5b1983c716ff360bda4af8f00baedd8b64452ee0663836fb5e011c31884fa4ae"} Dec 05 17:37:01 crc kubenswrapper[4961]: I1205 17:37:01.580001 4961 generic.go:334] "Generic (PLEG): container finished" podID="81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" containerID="c1de3f84c56bfe9648aa706574eef6a834c05b7f89b07ac889357628ab122c68" exitCode=0 Dec 05 17:37:01 crc kubenswrapper[4961]: I1205 17:37:01.580963 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4rgn2" event={"ID":"81f0c1f1-bb8e-4ae4-a87b-460ec2f12258","Type":"ContainerDied","Data":"c1de3f84c56bfe9648aa706574eef6a834c05b7f89b07ac889357628ab122c68"} Dec 05 17:37:03 crc kubenswrapper[4961]: I1205 17:37:03.153190 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-c5wk7" Dec 05 17:37:03 crc kubenswrapper[4961]: I1205 17:37:03.153466 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-c5wk7" Dec 05 17:37:03 crc kubenswrapper[4961]: I1205 17:37:03.201982 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-c5wk7" Dec 05 17:37:03 crc kubenswrapper[4961]: I1205 17:37:03.600739 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7wrmp" event={"ID":"1a8bb3a1-6049-4a0e-8d86-3ddd148686b9","Type":"ContainerStarted","Data":"6afc82d7da0f8598cc8dfb344fe854f19f767594aeb257ef05d225777babaac9"} Dec 05 17:37:03 crc kubenswrapper[4961]: I1205 17:37:03.602753 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2vgjf" event={"ID":"4ab60ade-3b80-4731-9fa7-09c77a0fa666","Type":"ContainerStarted","Data":"f330a359f0dcf055cab81f830b377e95cde2afa6dff7c9bd2bdb3267afadceca"} Dec 05 17:37:03 crc kubenswrapper[4961]: I1205 17:37:03.604965 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kgvwm" event={"ID":"6ae1ad4f-caa6-49b9-9d32-6905088903bf","Type":"ContainerStarted","Data":"fdada913005f0ed962a8aae89c22d907ee8248e0e740ec990da25b884cdc7b6d"} Dec 05 17:37:03 crc kubenswrapper[4961]: I1205 17:37:03.607173 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hdlff" event={"ID":"af3d9725-8492-4a3a-b125-a545b2d4c8c7","Type":"ContainerStarted","Data":"f45d8913d22c5cf2feaf37342b571d9c407a87e665527de862aef9bbf04201b3"} Dec 05 17:37:03 crc kubenswrapper[4961]: I1205 17:37:03.609208 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4rgn2" event={"ID":"81f0c1f1-bb8e-4ae4-a87b-460ec2f12258","Type":"ContainerStarted","Data":"b89276b287c0ee766626cc9dd93bf00997a01a5a639953c364d6990df5417f76"} Dec 05 17:37:03 crc kubenswrapper[4961]: I1205 17:37:03.623739 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-7wrmp" podStartSLOduration=5.811012018 podStartE2EDuration="1m21.623719619s" podCreationTimestamp="2025-12-05 17:35:42 +0000 UTC" firstStartedPulling="2025-12-05 17:35:46.702024835 +0000 UTC m=+152.763175308" lastFinishedPulling="2025-12-05 17:37:02.514732436 +0000 UTC m=+228.575882909" observedRunningTime="2025-12-05 17:37:03.621319132 +0000 UTC m=+229.682469625" watchObservedRunningTime="2025-12-05 17:37:03.623719619 +0000 UTC m=+229.684870092" Dec 05 
17:37:03 crc kubenswrapper[4961]: I1205 17:37:03.644114 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-kgvwm" podStartSLOduration=5.059944381 podStartE2EDuration="1m21.644098062s" podCreationTimestamp="2025-12-05 17:35:42 +0000 UTC" firstStartedPulling="2025-12-05 17:35:46.477501266 +0000 UTC m=+152.538651739" lastFinishedPulling="2025-12-05 17:37:03.061654947 +0000 UTC m=+229.122805420" observedRunningTime="2025-12-05 17:37:03.641592753 +0000 UTC m=+229.702743246" watchObservedRunningTime="2025-12-05 17:37:03.644098062 +0000 UTC m=+229.705248535" Dec 05 17:37:03 crc kubenswrapper[4961]: I1205 17:37:03.659738 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-4rgn2" podStartSLOduration=3.70341559 podStartE2EDuration="1m18.659722014s" podCreationTimestamp="2025-12-05 17:35:45 +0000 UTC" firstStartedPulling="2025-12-05 17:35:47.874064274 +0000 UTC m=+153.935214747" lastFinishedPulling="2025-12-05 17:37:02.830370688 +0000 UTC m=+228.891521171" observedRunningTime="2025-12-05 17:37:03.65747531 +0000 UTC m=+229.718625803" watchObservedRunningTime="2025-12-05 17:37:03.659722014 +0000 UTC m=+229.720872487" Dec 05 17:37:03 crc kubenswrapper[4961]: I1205 17:37:03.679156 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-2vgjf" podStartSLOduration=3.815787896 podStartE2EDuration="1m18.679138114s" podCreationTimestamp="2025-12-05 17:35:45 +0000 UTC" firstStartedPulling="2025-12-05 17:35:47.85114576 +0000 UTC m=+153.912296233" lastFinishedPulling="2025-12-05 17:37:02.714495978 +0000 UTC m=+228.775646451" observedRunningTime="2025-12-05 17:37:03.674953634 +0000 UTC m=+229.736104117" watchObservedRunningTime="2025-12-05 17:37:03.679138114 +0000 UTC m=+229.740288587" Dec 05 17:37:03 crc kubenswrapper[4961]: I1205 17:37:03.696464 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-hdlff" podStartSLOduration=5.43403704 podStartE2EDuration="1m21.696448775s" podCreationTimestamp="2025-12-05 17:35:42 +0000 UTC" firstStartedPulling="2025-12-05 17:35:46.477763812 +0000 UTC m=+152.538914285" lastFinishedPulling="2025-12-05 17:37:02.740175527 +0000 UTC m=+228.801326020" observedRunningTime="2025-12-05 17:37:03.694084669 +0000 UTC m=+229.755235152" watchObservedRunningTime="2025-12-05 17:37:03.696448775 +0000 UTC m=+229.757599248" Dec 05 17:37:04 crc kubenswrapper[4961]: I1205 17:37:04.673927 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2djc5" Dec 05 17:37:04 crc kubenswrapper[4961]: I1205 17:37:04.674292 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2djc5" Dec 05 17:37:04 crc kubenswrapper[4961]: I1205 17:37:04.715310 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2djc5" Dec 05 17:37:05 crc kubenswrapper[4961]: I1205 17:37:05.662963 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2djc5" Dec 05 17:37:05 crc kubenswrapper[4961]: I1205 17:37:05.691185 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-2vgjf" Dec 05 17:37:05 crc kubenswrapper[4961]: I1205 17:37:05.691254 4961 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-2vgjf" Dec 05 17:37:05 crc kubenswrapper[4961]: I1205 17:37:05.850925 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-f9gdg"] Dec 05 17:37:06 crc kubenswrapper[4961]: I1205 17:37:06.018048 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-4rgn2" Dec 05 17:37:06 crc kubenswrapper[4961]: I1205 17:37:06.018112 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-4rgn2" Dec 05 17:37:06 crc kubenswrapper[4961]: I1205 17:37:06.725183 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-2vgjf" podUID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" containerName="registry-server" probeResult="failure" output=< Dec 05 17:37:06 crc kubenswrapper[4961]: timeout: failed to connect service ":50051" within 1s Dec 05 17:37:06 crc kubenswrapper[4961]: > Dec 05 17:37:07 crc kubenswrapper[4961]: I1205 17:37:07.053946 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-4rgn2" podUID="81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" containerName="registry-server" probeResult="failure" output=< Dec 05 17:37:07 crc kubenswrapper[4961]: timeout: failed to connect service ":50051" within 1s Dec 05 17:37:07 crc kubenswrapper[4961]: > Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.808434 4961 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.809289 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457" gracePeriod=15 Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.809338 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9" gracePeriod=15 Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.809399 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548" gracePeriod=15 Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.809399 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998" gracePeriod=15 Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.809415 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0" gracePeriod=15 Dec 05 
17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.810039 4961 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 17:37:11 crc kubenswrapper[4961]: E1205 17:37:11.810303 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.810323 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 17:37:11 crc kubenswrapper[4961]: E1205 17:37:11.810340 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b4e16a7-3656-4a7f-b0a9-4a8c5210d292" containerName="extract-utilities" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.810351 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b4e16a7-3656-4a7f-b0a9-4a8c5210d292" containerName="extract-utilities" Dec 05 17:37:11 crc kubenswrapper[4961]: E1205 17:37:11.810361 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b4e16a7-3656-4a7f-b0a9-4a8c5210d292" containerName="registry-server" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.810370 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b4e16a7-3656-4a7f-b0a9-4a8c5210d292" containerName="registry-server" Dec 05 17:37:11 crc kubenswrapper[4961]: E1205 17:37:11.810383 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.810390 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 17:37:11 crc kubenswrapper[4961]: E1205 17:37:11.810398 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.810406 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 17:37:11 crc kubenswrapper[4961]: E1205 17:37:11.810417 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.810425 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Dec 05 17:37:11 crc kubenswrapper[4961]: E1205 17:37:11.810439 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8fb7aef8-279c-4b6c-a9f7-916892f33c5a" containerName="pruner" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.810447 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="8fb7aef8-279c-4b6c-a9f7-916892f33c5a" containerName="pruner" Dec 05 17:37:11 crc kubenswrapper[4961]: E1205 17:37:11.810460 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b4e16a7-3656-4a7f-b0a9-4a8c5210d292" containerName="extract-content" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.810469 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b4e16a7-3656-4a7f-b0a9-4a8c5210d292" containerName="extract-content" Dec 05 17:37:11 crc kubenswrapper[4961]: E1205 17:37:11.810479 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" 
containerName="kube-apiserver-cert-syncer" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.810487 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 17:37:11 crc kubenswrapper[4961]: E1205 17:37:11.810499 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.810506 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.810620 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.810631 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.810642 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.810653 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.810664 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="8fb7aef8-279c-4b6c-a9f7-916892f33c5a" containerName="pruner" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.810672 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b4e16a7-3656-4a7f-b0a9-4a8c5210d292" containerName="registry-server" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.810683 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.810692 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 17:37:11 crc kubenswrapper[4961]: E1205 17:37:11.810828 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.810838 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.814006 4961 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.814821 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.818352 4961 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.821006 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.821072 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.821413 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.821455 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.821510 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.821548 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.822410 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.822470 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: 
\"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.853827 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.924586 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.924660 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.924701 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.924748 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.924802 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.924844 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.924967 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.924994 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.925048 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.925090 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.925085 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.925122 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.925401 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.925410 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.925440 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:37:11 crc kubenswrapper[4961]: I1205 17:37:11.925469 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:37:12 crc kubenswrapper[4961]: I1205 17:37:12.154769 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:37:12 crc kubenswrapper[4961]: W1205 17:37:12.318912 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-2f846b564875f25615320243d724c7fd24c2c57a563012d7c3de40a6c77d94fc WatchSource:0}: Error finding container 2f846b564875f25615320243d724c7fd24c2c57a563012d7c3de40a6c77d94fc: Status 404 returned error can't find the container with id 2f846b564875f25615320243d724c7fd24c2c57a563012d7c3de40a6c77d94fc Dec 05 17:37:12 crc kubenswrapper[4961]: E1205 17:37:12.323442 4961 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.17:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187e625797ae4cdd openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 17:37:12.322272477 +0000 UTC m=+238.383422950,LastTimestamp:2025-12-05 17:37:12.322272477 +0000 UTC m=+238.383422950,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 17:37:12 crc kubenswrapper[4961]: I1205 17:37:12.623409 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-hdlff" Dec 05 17:37:12 crc kubenswrapper[4961]: I1205 17:37:12.623697 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-hdlff" Dec 05 17:37:12 crc kubenswrapper[4961]: I1205 17:37:12.659251 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"2f846b564875f25615320243d724c7fd24c2c57a563012d7c3de40a6c77d94fc"} Dec 05 17:37:12 crc kubenswrapper[4961]: I1205 17:37:12.662160 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 17:37:12 crc kubenswrapper[4961]: I1205 17:37:12.663643 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 17:37:12 crc kubenswrapper[4961]: I1205 17:37:12.664308 4961 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0" exitCode=2 Dec 05 17:37:12 crc kubenswrapper[4961]: I1205 17:37:12.688577 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-hdlff" Dec 05 17:37:12 crc kubenswrapper[4961]: I1205 17:37:12.689851 4961 
status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:12 crc kubenswrapper[4961]: I1205 17:37:12.690259 4961 status_manager.go:851] "Failed to get status for pod" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" pod="openshift-marketplace/community-operators-hdlff" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hdlff\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:12 crc kubenswrapper[4961]: E1205 17:37:12.940292 4961 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.17:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" volumeName="registry-storage" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.001816 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-kgvwm" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.001937 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-kgvwm" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.052888 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-kgvwm" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.053936 4961 status_manager.go:851] "Failed to get status for pod" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" pod="openshift-marketplace/community-operators-kgvwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kgvwm\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.054480 4961 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.055209 4961 status_manager.go:851] "Failed to get status for pod" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" pod="openshift-marketplace/community-operators-hdlff" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hdlff\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.115564 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-7wrmp" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.115611 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-7wrmp" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.175310 4961 kubelet.go:2542] "SyncLoop (probe)" 
probe="startup" status="started" pod="openshift-marketplace/certified-operators-7wrmp" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.178337 4961 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.178655 4961 status_manager.go:851] "Failed to get status for pod" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" pod="openshift-marketplace/community-operators-hdlff" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hdlff\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.178859 4961 status_manager.go:851] "Failed to get status for pod" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" pod="openshift-marketplace/community-operators-kgvwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kgvwm\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.179031 4961 status_manager.go:851] "Failed to get status for pod" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" pod="openshift-marketplace/certified-operators-7wrmp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-7wrmp\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.194467 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-c5wk7" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.196135 4961 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.196339 4961 status_manager.go:851] "Failed to get status for pod" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" pod="openshift-marketplace/community-operators-hdlff" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hdlff\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.196683 4961 status_manager.go:851] "Failed to get status for pod" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" pod="openshift-marketplace/community-operators-kgvwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kgvwm\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.197277 4961 status_manager.go:851] "Failed to get status for pod" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" pod="openshift-marketplace/certified-operators-c5wk7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c5wk7\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 
17:37:13.197528 4961 status_manager.go:851] "Failed to get status for pod" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" pod="openshift-marketplace/certified-operators-7wrmp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-7wrmp\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.724002 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-hdlff" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.724742 4961 status_manager.go:851] "Failed to get status for pod" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" pod="openshift-marketplace/community-operators-kgvwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kgvwm\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.725293 4961 status_manager.go:851] "Failed to get status for pod" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" pod="openshift-marketplace/certified-operators-c5wk7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c5wk7\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.725833 4961 status_manager.go:851] "Failed to get status for pod" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" pod="openshift-marketplace/certified-operators-7wrmp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-7wrmp\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.726180 4961 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.726596 4961 status_manager.go:851] "Failed to get status for pod" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" pod="openshift-marketplace/community-operators-hdlff" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hdlff\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.739186 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-7wrmp" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.739735 4961 status_manager.go:851] "Failed to get status for pod" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" pod="openshift-marketplace/certified-operators-c5wk7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c5wk7\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.740272 4961 status_manager.go:851] "Failed to get status for pod" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" pod="openshift-marketplace/certified-operators-7wrmp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-7wrmp\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:13 crc 
kubenswrapper[4961]: I1205 17:37:13.740812 4961 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.741080 4961 status_manager.go:851] "Failed to get status for pod" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" pod="openshift-marketplace/community-operators-hdlff" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hdlff\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:13 crc kubenswrapper[4961]: I1205 17:37:13.741463 4961 status_manager.go:851] "Failed to get status for pod" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" pod="openshift-marketplace/community-operators-kgvwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kgvwm\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:14 crc kubenswrapper[4961]: I1205 17:37:14.517189 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-kgvwm" Dec 05 17:37:14 crc kubenswrapper[4961]: I1205 17:37:14.518083 4961 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:14 crc kubenswrapper[4961]: I1205 17:37:14.518591 4961 status_manager.go:851] "Failed to get status for pod" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" pod="openshift-marketplace/community-operators-hdlff" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hdlff\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:14 crc kubenswrapper[4961]: I1205 17:37:14.519881 4961 status_manager.go:851] "Failed to get status for pod" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" pod="openshift-marketplace/community-operators-kgvwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kgvwm\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:14 crc kubenswrapper[4961]: I1205 17:37:14.520657 4961 status_manager.go:851] "Failed to get status for pod" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" pod="openshift-marketplace/certified-operators-c5wk7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c5wk7\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:14 crc kubenswrapper[4961]: I1205 17:37:14.521128 4961 status_manager.go:851] "Failed to get status for pod" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" pod="openshift-marketplace/certified-operators-7wrmp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-7wrmp\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:14 crc kubenswrapper[4961]: I1205 17:37:14.869168 4961 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:14 crc kubenswrapper[4961]: I1205 17:37:14.869598 4961 status_manager.go:851] "Failed to get status for pod" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" pod="openshift-marketplace/community-operators-hdlff" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hdlff\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:14 crc kubenswrapper[4961]: I1205 17:37:14.870291 4961 status_manager.go:851] "Failed to get status for pod" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" pod="openshift-marketplace/community-operators-kgvwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kgvwm\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:14 crc kubenswrapper[4961]: I1205 17:37:14.870908 4961 status_manager.go:851] "Failed to get status for pod" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" pod="openshift-marketplace/certified-operators-c5wk7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c5wk7\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:14 crc kubenswrapper[4961]: I1205 17:37:14.871248 4961 status_manager.go:851] "Failed to get status for pod" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" pod="openshift-marketplace/certified-operators-7wrmp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-7wrmp\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:15 crc kubenswrapper[4961]: E1205 17:37:15.648114 4961 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:15 crc kubenswrapper[4961]: E1205 17:37:15.649075 4961 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:15 crc kubenswrapper[4961]: E1205 17:37:15.649337 4961 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:15 crc kubenswrapper[4961]: E1205 17:37:15.650454 4961 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:15 crc kubenswrapper[4961]: E1205 17:37:15.650759 4961 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:15 crc kubenswrapper[4961]: I1205 17:37:15.650895 4961 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Dec 05 17:37:15 crc kubenswrapper[4961]: E1205 
17:37:15.651165 4961 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.17:6443: connect: connection refused" interval="200ms" Dec 05 17:37:15 crc kubenswrapper[4961]: I1205 17:37:15.694852 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Dec 05 17:37:15 crc kubenswrapper[4961]: I1205 17:37:15.696479 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 17:37:15 crc kubenswrapper[4961]: I1205 17:37:15.697424 4961 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9" exitCode=0 Dec 05 17:37:15 crc kubenswrapper[4961]: I1205 17:37:15.697488 4961 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548" exitCode=0 Dec 05 17:37:15 crc kubenswrapper[4961]: I1205 17:37:15.697499 4961 scope.go:117] "RemoveContainer" containerID="94d7cba28865ea9aa6f42d93f639ce3f3ccfdffba71b399e331e75932f9c8a04" Dec 05 17:37:15 crc kubenswrapper[4961]: I1205 17:37:15.697511 4961 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998" exitCode=0 Dec 05 17:37:15 crc kubenswrapper[4961]: I1205 17:37:15.735470 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-2vgjf" Dec 05 17:37:15 crc kubenswrapper[4961]: I1205 17:37:15.736842 4961 status_manager.go:851] "Failed to get status for pod" podUID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" pod="openshift-marketplace/redhat-operators-2vgjf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2vgjf\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:15 crc kubenswrapper[4961]: I1205 17:37:15.737312 4961 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:15 crc kubenswrapper[4961]: I1205 17:37:15.737617 4961 status_manager.go:851] "Failed to get status for pod" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" pod="openshift-marketplace/community-operators-hdlff" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hdlff\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:15 crc kubenswrapper[4961]: I1205 17:37:15.737920 4961 status_manager.go:851] "Failed to get status for pod" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" pod="openshift-marketplace/community-operators-kgvwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kgvwm\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:15 crc kubenswrapper[4961]: I1205 17:37:15.738187 
4961 status_manager.go:851] "Failed to get status for pod" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" pod="openshift-marketplace/certified-operators-c5wk7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c5wk7\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:15 crc kubenswrapper[4961]: I1205 17:37:15.738443 4961 status_manager.go:851] "Failed to get status for pod" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" pod="openshift-marketplace/certified-operators-7wrmp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-7wrmp\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:15 crc kubenswrapper[4961]: I1205 17:37:15.767516 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-2vgjf" Dec 05 17:37:15 crc kubenswrapper[4961]: I1205 17:37:15.768109 4961 status_manager.go:851] "Failed to get status for pod" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" pod="openshift-marketplace/community-operators-hdlff" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hdlff\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:15 crc kubenswrapper[4961]: I1205 17:37:15.768553 4961 status_manager.go:851] "Failed to get status for pod" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" pod="openshift-marketplace/community-operators-kgvwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kgvwm\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:15 crc kubenswrapper[4961]: I1205 17:37:15.768962 4961 status_manager.go:851] "Failed to get status for pod" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" pod="openshift-marketplace/certified-operators-c5wk7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c5wk7\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:15 crc kubenswrapper[4961]: I1205 17:37:15.769278 4961 status_manager.go:851] "Failed to get status for pod" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" pod="openshift-marketplace/certified-operators-7wrmp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-7wrmp\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:15 crc kubenswrapper[4961]: I1205 17:37:15.769565 4961 status_manager.go:851] "Failed to get status for pod" podUID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" pod="openshift-marketplace/redhat-operators-2vgjf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2vgjf\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:15 crc kubenswrapper[4961]: I1205 17:37:15.769931 4961 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:15 crc kubenswrapper[4961]: E1205 17:37:15.852601 4961 controller.go:145] "Failed to ensure lease exists, will retry" err="Get 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.17:6443: connect: connection refused" interval="400ms" Dec 05 17:37:16 crc kubenswrapper[4961]: I1205 17:37:16.081160 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-4rgn2" Dec 05 17:37:16 crc kubenswrapper[4961]: I1205 17:37:16.082118 4961 status_manager.go:851] "Failed to get status for pod" podUID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" pod="openshift-marketplace/redhat-operators-2vgjf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2vgjf\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:16 crc kubenswrapper[4961]: I1205 17:37:16.082683 4961 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:16 crc kubenswrapper[4961]: I1205 17:37:16.083531 4961 status_manager.go:851] "Failed to get status for pod" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" pod="openshift-marketplace/community-operators-hdlff" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hdlff\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:16 crc kubenswrapper[4961]: I1205 17:37:16.083914 4961 status_manager.go:851] "Failed to get status for pod" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" pod="openshift-marketplace/community-operators-kgvwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kgvwm\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:16 crc kubenswrapper[4961]: I1205 17:37:16.084445 4961 status_manager.go:851] "Failed to get status for pod" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" pod="openshift-marketplace/certified-operators-c5wk7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c5wk7\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:16 crc kubenswrapper[4961]: I1205 17:37:16.085001 4961 status_manager.go:851] "Failed to get status for pod" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" pod="openshift-marketplace/certified-operators-7wrmp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-7wrmp\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:16 crc kubenswrapper[4961]: I1205 17:37:16.085667 4961 status_manager.go:851] "Failed to get status for pod" podUID="81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" pod="openshift-marketplace/redhat-operators-4rgn2" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-4rgn2\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:16 crc kubenswrapper[4961]: I1205 17:37:16.129561 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-4rgn2" Dec 05 17:37:16 crc kubenswrapper[4961]: I1205 17:37:16.130242 4961 status_manager.go:851] "Failed to get status for pod" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" 
pod="openshift-marketplace/community-operators-kgvwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kgvwm\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:16 crc kubenswrapper[4961]: I1205 17:37:16.130864 4961 status_manager.go:851] "Failed to get status for pod" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" pod="openshift-marketplace/certified-operators-c5wk7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c5wk7\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:16 crc kubenswrapper[4961]: I1205 17:37:16.131356 4961 status_manager.go:851] "Failed to get status for pod" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" pod="openshift-marketplace/certified-operators-7wrmp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-7wrmp\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:16 crc kubenswrapper[4961]: I1205 17:37:16.131694 4961 status_manager.go:851] "Failed to get status for pod" podUID="81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" pod="openshift-marketplace/redhat-operators-4rgn2" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-4rgn2\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:16 crc kubenswrapper[4961]: I1205 17:37:16.131993 4961 status_manager.go:851] "Failed to get status for pod" podUID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" pod="openshift-marketplace/redhat-operators-2vgjf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2vgjf\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:16 crc kubenswrapper[4961]: I1205 17:37:16.132343 4961 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:16 crc kubenswrapper[4961]: I1205 17:37:16.132605 4961 status_manager.go:851] "Failed to get status for pod" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" pod="openshift-marketplace/community-operators-hdlff" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hdlff\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:16 crc kubenswrapper[4961]: E1205 17:37:16.254045 4961 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.17:6443: connect: connection refused" interval="800ms" Dec 05 17:37:16 crc kubenswrapper[4961]: I1205 17:37:16.704363 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"2df59cecd9b5083f8bf4290fe0d92a35b3fc3113597695e33aa71d9ff290995f"} Dec 05 17:37:17 crc kubenswrapper[4961]: E1205 17:37:17.054746 4961 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 
38.102.83.17:6443: connect: connection refused" interval="1.6s" Dec 05 17:37:17 crc kubenswrapper[4961]: I1205 17:37:17.713815 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 17:37:17 crc kubenswrapper[4961]: I1205 17:37:17.715214 4961 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457" exitCode=0 Dec 05 17:37:17 crc kubenswrapper[4961]: I1205 17:37:17.717479 4961 generic.go:334] "Generic (PLEG): container finished" podID="db9da7f5-eb95-470f-8f02-55ad79822cb5" containerID="a66c3b51e3e34591008b47fa21b10876e91c057965f7e1a98343224583d4ea55" exitCode=0 Dec 05 17:37:17 crc kubenswrapper[4961]: I1205 17:37:17.717576 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"db9da7f5-eb95-470f-8f02-55ad79822cb5","Type":"ContainerDied","Data":"a66c3b51e3e34591008b47fa21b10876e91c057965f7e1a98343224583d4ea55"} Dec 05 17:37:17 crc kubenswrapper[4961]: I1205 17:37:17.718457 4961 status_manager.go:851] "Failed to get status for pod" podUID="81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" pod="openshift-marketplace/redhat-operators-4rgn2" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-4rgn2\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:17 crc kubenswrapper[4961]: I1205 17:37:17.718908 4961 status_manager.go:851] "Failed to get status for pod" podUID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" pod="openshift-marketplace/redhat-operators-2vgjf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2vgjf\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:17 crc kubenswrapper[4961]: I1205 17:37:17.719210 4961 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:17 crc kubenswrapper[4961]: I1205 17:37:17.719562 4961 status_manager.go:851] "Failed to get status for pod" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" pod="openshift-marketplace/community-operators-hdlff" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hdlff\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:17 crc kubenswrapper[4961]: I1205 17:37:17.719844 4961 status_manager.go:851] "Failed to get status for pod" podUID="db9da7f5-eb95-470f-8f02-55ad79822cb5" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:17 crc kubenswrapper[4961]: I1205 17:37:17.722141 4961 status_manager.go:851] "Failed to get status for pod" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" pod="openshift-marketplace/community-operators-kgvwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kgvwm\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:17 crc kubenswrapper[4961]: I1205 
17:37:17.723067 4961 status_manager.go:851] "Failed to get status for pod" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" pod="openshift-marketplace/certified-operators-c5wk7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c5wk7\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:17 crc kubenswrapper[4961]: I1205 17:37:17.725528 4961 status_manager.go:851] "Failed to get status for pod" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" pod="openshift-marketplace/certified-operators-7wrmp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-7wrmp\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:18 crc kubenswrapper[4961]: E1205 17:37:18.655974 4961 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.17:6443: connect: connection refused" interval="3.2s" Dec 05 17:37:18 crc kubenswrapper[4961]: I1205 17:37:18.953206 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 17:37:18 crc kubenswrapper[4961]: I1205 17:37:18.954457 4961 status_manager.go:851] "Failed to get status for pod" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" pod="openshift-marketplace/certified-operators-7wrmp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-7wrmp\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:18 crc kubenswrapper[4961]: I1205 17:37:18.955035 4961 status_manager.go:851] "Failed to get status for pod" podUID="81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" pod="openshift-marketplace/redhat-operators-4rgn2" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-4rgn2\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:18 crc kubenswrapper[4961]: I1205 17:37:18.955634 4961 status_manager.go:851] "Failed to get status for pod" podUID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" pod="openshift-marketplace/redhat-operators-2vgjf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2vgjf\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:18 crc kubenswrapper[4961]: I1205 17:37:18.956507 4961 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:18 crc kubenswrapper[4961]: I1205 17:37:18.956908 4961 status_manager.go:851] "Failed to get status for pod" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" pod="openshift-marketplace/community-operators-hdlff" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hdlff\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:18 crc kubenswrapper[4961]: I1205 17:37:18.957272 4961 status_manager.go:851] "Failed to get status for pod" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" pod="openshift-marketplace/community-operators-kgvwm" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kgvwm\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:18 crc kubenswrapper[4961]: I1205 17:37:18.957621 4961 status_manager.go:851] "Failed to get status for pod" podUID="db9da7f5-eb95-470f-8f02-55ad79822cb5" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:18 crc kubenswrapper[4961]: I1205 17:37:18.958029 4961 status_manager.go:851] "Failed to get status for pod" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" pod="openshift-marketplace/certified-operators-c5wk7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c5wk7\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.064833 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/db9da7f5-eb95-470f-8f02-55ad79822cb5-kubelet-dir\") pod \"db9da7f5-eb95-470f-8f02-55ad79822cb5\" (UID: \"db9da7f5-eb95-470f-8f02-55ad79822cb5\") " Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.064922 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/db9da7f5-eb95-470f-8f02-55ad79822cb5-kube-api-access\") pod \"db9da7f5-eb95-470f-8f02-55ad79822cb5\" (UID: \"db9da7f5-eb95-470f-8f02-55ad79822cb5\") " Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.064955 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/db9da7f5-eb95-470f-8f02-55ad79822cb5-var-lock\") pod \"db9da7f5-eb95-470f-8f02-55ad79822cb5\" (UID: \"db9da7f5-eb95-470f-8f02-55ad79822cb5\") " Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.064968 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/db9da7f5-eb95-470f-8f02-55ad79822cb5-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "db9da7f5-eb95-470f-8f02-55ad79822cb5" (UID: "db9da7f5-eb95-470f-8f02-55ad79822cb5"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.065082 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/db9da7f5-eb95-470f-8f02-55ad79822cb5-var-lock" (OuterVolumeSpecName: "var-lock") pod "db9da7f5-eb95-470f-8f02-55ad79822cb5" (UID: "db9da7f5-eb95-470f-8f02-55ad79822cb5"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.065248 4961 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/db9da7f5-eb95-470f-8f02-55ad79822cb5-kubelet-dir\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.065264 4961 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/db9da7f5-eb95-470f-8f02-55ad79822cb5-var-lock\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.070201 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/db9da7f5-eb95-470f-8f02-55ad79822cb5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "db9da7f5-eb95-470f-8f02-55ad79822cb5" (UID: "db9da7f5-eb95-470f-8f02-55ad79822cb5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.166132 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/db9da7f5-eb95-470f-8f02-55ad79822cb5-kube-api-access\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.553274 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.554230 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.554842 4961 status_manager.go:851] "Failed to get status for pod" podUID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" pod="openshift-marketplace/redhat-operators-2vgjf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2vgjf\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.555270 4961 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.555802 4961 status_manager.go:851] "Failed to get status for pod" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" pod="openshift-marketplace/community-operators-hdlff" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hdlff\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.556067 4961 status_manager.go:851] "Failed to get status for pod" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" pod="openshift-marketplace/community-operators-kgvwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kgvwm\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.556388 4961 status_manager.go:851] "Failed to get status for pod" podUID="db9da7f5-eb95-470f-8f02-55ad79822cb5" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.556681 4961 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.556967 4961 status_manager.go:851] "Failed to get status for pod" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" pod="openshift-marketplace/certified-operators-c5wk7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c5wk7\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.557277 4961 status_manager.go:851] "Failed to get status for pod" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" pod="openshift-marketplace/certified-operators-7wrmp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-7wrmp\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.557671 4961 status_manager.go:851] "Failed to get status for pod" podUID="81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" pod="openshift-marketplace/redhat-operators-4rgn2" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-4rgn2\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.671699 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.671802 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.671827 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.671855 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.671953 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.672052 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.672199 4961 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.672250 4961 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.672262 4961 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.730741 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.731749 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.731790 4961 scope.go:117] "RemoveContainer" containerID="b4d9ccea9edd6d224cb06e9495b30a28cf0ed1a0431815967c46488bd5e263d9" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.734320 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.734339 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"db9da7f5-eb95-470f-8f02-55ad79822cb5","Type":"ContainerDied","Data":"5931a56c10a76928228cb59e934a278144aa808c780982b55483b05c40a4fef5"} Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.734404 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5931a56c10a76928228cb59e934a278144aa808c780982b55483b05c40a4fef5" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.735274 4961 status_manager.go:851] "Failed to get status for pod" podUID="db9da7f5-eb95-470f-8f02-55ad79822cb5" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.735813 4961 status_manager.go:851] "Failed to get status for pod" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" pod="openshift-marketplace/community-operators-kgvwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kgvwm\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.736324 4961 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.736711 4961 status_manager.go:851] "Failed to get status for pod" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" pod="openshift-marketplace/certified-operators-c5wk7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c5wk7\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.737236 4961 status_manager.go:851] "Failed to get status for pod" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" pod="openshift-marketplace/certified-operators-7wrmp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-7wrmp\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.737599 4961 status_manager.go:851] "Failed to get status for pod" podUID="81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" pod="openshift-marketplace/redhat-operators-4rgn2" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-4rgn2\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.737962 4961 status_manager.go:851] "Failed to get status for pod" podUID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" pod="openshift-marketplace/redhat-operators-2vgjf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2vgjf\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.738338 4961 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.738666 4961 status_manager.go:851] "Failed to get status for pod" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" pod="openshift-marketplace/community-operators-hdlff" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hdlff\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.749418 4961 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.750300 4961 status_manager.go:851] "Failed to get status for pod" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" pod="openshift-marketplace/community-operators-hdlff" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hdlff\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.750702 4961 status_manager.go:851] "Failed to get status for pod" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" pod="openshift-marketplace/community-operators-kgvwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kgvwm\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.751080 4961 status_manager.go:851] "Failed to get status for pod" podUID="db9da7f5-eb95-470f-8f02-55ad79822cb5" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.751531 4961 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.751959 4961 status_manager.go:851] "Failed to get status for pod" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" pod="openshift-marketplace/certified-operators-c5wk7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c5wk7\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.752506 4961 status_manager.go:851] "Failed to get status for pod" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" pod="openshift-marketplace/certified-operators-7wrmp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-7wrmp\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.752687 4961 scope.go:117] "RemoveContainer" 
containerID="e1d63c953107b4c151f62e3727e26885bfa8bb185285d2263c508e994d515548" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.752953 4961 status_manager.go:851] "Failed to get status for pod" podUID="81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" pod="openshift-marketplace/redhat-operators-4rgn2" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-4rgn2\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.753691 4961 status_manager.go:851] "Failed to get status for pod" podUID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" pod="openshift-marketplace/redhat-operators-2vgjf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2vgjf\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.757822 4961 status_manager.go:851] "Failed to get status for pod" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" pod="openshift-marketplace/certified-operators-7wrmp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-7wrmp\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.758295 4961 status_manager.go:851] "Failed to get status for pod" podUID="81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" pod="openshift-marketplace/redhat-operators-4rgn2" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-4rgn2\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.758722 4961 status_manager.go:851] "Failed to get status for pod" podUID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" pod="openshift-marketplace/redhat-operators-2vgjf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2vgjf\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.759194 4961 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.759602 4961 status_manager.go:851] "Failed to get status for pod" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" pod="openshift-marketplace/community-operators-hdlff" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hdlff\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.760433 4961 status_manager.go:851] "Failed to get status for pod" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" pod="openshift-marketplace/community-operators-kgvwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kgvwm\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.761481 4961 status_manager.go:851] "Failed to get status for pod" podUID="db9da7f5-eb95-470f-8f02-55ad79822cb5" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.761998 4961 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.762437 4961 status_manager.go:851] "Failed to get status for pod" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" pod="openshift-marketplace/certified-operators-c5wk7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c5wk7\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.770576 4961 scope.go:117] "RemoveContainer" containerID="f7f4dc872f0fbf39da8a707dd245970583d94368d3fc2cd486c5ff1b81a6d998" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.796909 4961 scope.go:117] "RemoveContainer" containerID="c0c09591d5d73277062f0d493bcc0476917b982f5714c89a4a6d8253e1b629f0" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.813822 4961 scope.go:117] "RemoveContainer" containerID="5289dcfc7d505b76ab4ba320193087bf75b3b19d00744ed5bd59956a11a97457" Dec 05 17:37:19 crc kubenswrapper[4961]: I1205 17:37:19.828946 4961 scope.go:117] "RemoveContainer" containerID="52036c8badc12a1121c9266bc43522c89a102d73ec371c3785f91dd92b5701ec" Dec 05 17:37:20 crc kubenswrapper[4961]: I1205 17:37:20.871354 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Dec 05 17:37:21 crc kubenswrapper[4961]: E1205 17:37:21.436131 4961 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.17:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.187e625797ae4cdd openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-12-05 17:37:12.322272477 +0000 UTC m=+238.383422950,LastTimestamp:2025-12-05 17:37:12.322272477 +0000 UTC m=+238.383422950,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Dec 05 17:37:21 crc kubenswrapper[4961]: E1205 17:37:21.858096 4961 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.17:6443: connect: connection refused" interval="6.4s" Dec 05 17:37:22 crc kubenswrapper[4961]: I1205 17:37:22.862969 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:37:22 crc kubenswrapper[4961]: I1205 17:37:22.864445 4961 status_manager.go:851] "Failed to get status for pod" podUID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" pod="openshift-marketplace/redhat-operators-2vgjf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2vgjf\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:22 crc kubenswrapper[4961]: I1205 17:37:22.864865 4961 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:22 crc kubenswrapper[4961]: I1205 17:37:22.865207 4961 status_manager.go:851] "Failed to get status for pod" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" pod="openshift-marketplace/community-operators-hdlff" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hdlff\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:22 crc kubenswrapper[4961]: I1205 17:37:22.865669 4961 status_manager.go:851] "Failed to get status for pod" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" pod="openshift-marketplace/community-operators-kgvwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kgvwm\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:22 crc kubenswrapper[4961]: I1205 17:37:22.866356 4961 status_manager.go:851] "Failed to get status for pod" podUID="db9da7f5-eb95-470f-8f02-55ad79822cb5" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:22 crc kubenswrapper[4961]: I1205 17:37:22.866712 4961 status_manager.go:851] "Failed to get status for pod" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" pod="openshift-marketplace/certified-operators-c5wk7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c5wk7\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:22 crc kubenswrapper[4961]: I1205 17:37:22.867387 4961 status_manager.go:851] "Failed to get status for pod" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" pod="openshift-marketplace/certified-operators-7wrmp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-7wrmp\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:22 crc kubenswrapper[4961]: I1205 17:37:22.867885 4961 status_manager.go:851] "Failed to get status for pod" podUID="81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" pod="openshift-marketplace/redhat-operators-4rgn2" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-4rgn2\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:22 crc kubenswrapper[4961]: I1205 17:37:22.888955 4961 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d176b9c3-755b-47b4-a7dd-a709873feaa8" Dec 05 17:37:22 crc kubenswrapper[4961]: I1205 17:37:22.888989 4961 mirror_client.go:130] "Deleting a 
mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d176b9c3-755b-47b4-a7dd-a709873feaa8" Dec 05 17:37:22 crc kubenswrapper[4961]: E1205 17:37:22.889519 4961 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:37:22 crc kubenswrapper[4961]: I1205 17:37:22.890336 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:37:22 crc kubenswrapper[4961]: W1205 17:37:22.909490 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-3324486047e626e890caf7b61021de9623d2252f9aa3ddc0f3180f9db80fdce0 WatchSource:0}: Error finding container 3324486047e626e890caf7b61021de9623d2252f9aa3ddc0f3180f9db80fdce0: Status 404 returned error can't find the container with id 3324486047e626e890caf7b61021de9623d2252f9aa3ddc0f3180f9db80fdce0 Dec 05 17:37:23 crc kubenswrapper[4961]: I1205 17:37:23.765088 4961 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="e1799cf9c9afc7597c2cece8010fe74190d265dcd762d4c307576897beded87c" exitCode=0 Dec 05 17:37:23 crc kubenswrapper[4961]: I1205 17:37:23.765195 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"e1799cf9c9afc7597c2cece8010fe74190d265dcd762d4c307576897beded87c"} Dec 05 17:37:23 crc kubenswrapper[4961]: I1205 17:37:23.765481 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"3324486047e626e890caf7b61021de9623d2252f9aa3ddc0f3180f9db80fdce0"} Dec 05 17:37:23 crc kubenswrapper[4961]: I1205 17:37:23.766519 4961 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d176b9c3-755b-47b4-a7dd-a709873feaa8" Dec 05 17:37:23 crc kubenswrapper[4961]: I1205 17:37:23.766592 4961 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d176b9c3-755b-47b4-a7dd-a709873feaa8" Dec 05 17:37:23 crc kubenswrapper[4961]: I1205 17:37:23.766871 4961 status_manager.go:851] "Failed to get status for pod" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" pod="openshift-marketplace/community-operators-hdlff" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-hdlff\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:23 crc kubenswrapper[4961]: E1205 17:37:23.767093 4961 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:37:23 crc kubenswrapper[4961]: I1205 17:37:23.767288 4961 status_manager.go:851] "Failed to get status for pod" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" pod="openshift-marketplace/community-operators-kgvwm" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/community-operators-kgvwm\": 
dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:23 crc kubenswrapper[4961]: I1205 17:37:23.768152 4961 status_manager.go:851] "Failed to get status for pod" podUID="db9da7f5-eb95-470f-8f02-55ad79822cb5" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:23 crc kubenswrapper[4961]: I1205 17:37:23.768574 4961 status_manager.go:851] "Failed to get status for pod" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" pod="openshift-marketplace/certified-operators-c5wk7" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-c5wk7\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:23 crc kubenswrapper[4961]: I1205 17:37:23.768877 4961 status_manager.go:851] "Failed to get status for pod" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" pod="openshift-marketplace/certified-operators-7wrmp" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-7wrmp\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:23 crc kubenswrapper[4961]: I1205 17:37:23.769103 4961 status_manager.go:851] "Failed to get status for pod" podUID="81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" pod="openshift-marketplace/redhat-operators-4rgn2" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-4rgn2\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:23 crc kubenswrapper[4961]: I1205 17:37:23.769438 4961 status_manager.go:851] "Failed to get status for pod" podUID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" pod="openshift-marketplace/redhat-operators-2vgjf" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-operators-2vgjf\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:23 crc kubenswrapper[4961]: I1205 17:37:23.769876 4961 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.17:6443: connect: connection refused" Dec 05 17:37:24 crc kubenswrapper[4961]: I1205 17:37:24.780019 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"00993c159b4e4a80cfef42a997c300ef786ccfd65fb1815e559f74ed688b50ff"} Dec 05 17:37:24 crc kubenswrapper[4961]: I1205 17:37:24.780316 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"bfda46d4de1743eb6cc2d0cea7600d802a59428736d2f84aced49329a2fa24a3"} Dec 05 17:37:24 crc kubenswrapper[4961]: I1205 17:37:24.780327 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"c4bfede6ca3ddd00bda65afd72769879c57f373f9599b42147e708aceece2c0f"} Dec 05 17:37:24 crc kubenswrapper[4961]: I1205 17:37:24.780336 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"425169b0fb843e21e8a153cd8e6e94e3f70f921c0cfc896a638ba2b06c25ea95"} Dec 05 17:37:24 crc kubenswrapper[4961]: I1205 17:37:24.783856 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 17:37:24 crc kubenswrapper[4961]: I1205 17:37:24.783903 4961 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857" exitCode=1 Dec 05 17:37:24 crc kubenswrapper[4961]: I1205 17:37:24.783927 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857"} Dec 05 17:37:24 crc kubenswrapper[4961]: I1205 17:37:24.784374 4961 scope.go:117] "RemoveContainer" containerID="0050f786d3e166468a7ca0cacbc097a87440e3cb9c3cf9b39a9cac82e1ad4857" Dec 05 17:37:25 crc kubenswrapper[4961]: I1205 17:37:25.797860 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"7fb243a77cbb1ce7d46716740904270efcac670cc2c9f76d50e66ab752eb9031"} Dec 05 17:37:25 crc kubenswrapper[4961]: I1205 17:37:25.799560 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:37:25 crc kubenswrapper[4961]: I1205 17:37:25.800075 4961 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d176b9c3-755b-47b4-a7dd-a709873feaa8" Dec 05 17:37:25 crc kubenswrapper[4961]: I1205 17:37:25.800360 4961 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d176b9c3-755b-47b4-a7dd-a709873feaa8" Dec 05 17:37:25 crc kubenswrapper[4961]: I1205 17:37:25.806520 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Dec 05 17:37:25 crc kubenswrapper[4961]: I1205 17:37:25.806614 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"3c3a418ea9feabcd5c370f4e080c72e3deda3149826723b4e1165d836acd098c"} Dec 05 17:37:27 crc kubenswrapper[4961]: I1205 17:37:27.890498 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:37:27 crc kubenswrapper[4961]: I1205 17:37:27.890570 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:37:27 crc kubenswrapper[4961]: I1205 17:37:27.901006 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:37:29 crc kubenswrapper[4961]: I1205 17:37:29.165261 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:37:29 crc kubenswrapper[4961]: I1205 17:37:29.169186 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:37:29 crc kubenswrapper[4961]: I1205 17:37:29.828982 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:37:30 crc kubenswrapper[4961]: I1205 17:37:30.813207 4961 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:37:30 crc kubenswrapper[4961]: I1205 17:37:30.880813 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" podUID="dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6" containerName="oauth-openshift" containerID="cri-o://2bec6009f0beecc0f250fe63c9f52737688cbbe1d03efe3aa0829c814f5412cd" gracePeriod=15 Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.279758 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.335099 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-ocp-branding-template\") pod \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.335164 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4p96n\" (UniqueName: \"kubernetes.io/projected/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-kube-api-access-4p96n\") pod \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.335204 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-service-ca\") pod \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.335237 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-router-certs\") pod \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.335263 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-audit-policies\") pod \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.335285 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-session\") pod \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.335308 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-login\") pod \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.335339 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-provider-selection\") pod \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.335371 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-serving-cert\") pod \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.335410 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-trusted-ca-bundle\") pod \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.335436 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-idp-0-file-data\") pod \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.335472 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-error\") pod \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.335496 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-audit-dir\") pod \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.335542 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-cliconfig\") pod \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\" (UID: \"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6\") " Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.337526 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6"). InnerVolumeSpecName "v4-0-config-system-service-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.337877 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.338225 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.338840 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.338872 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.344345 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.344836 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.344925 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-kube-api-access-4p96n" (OuterVolumeSpecName: "kube-api-access-4p96n") pod "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6"). InnerVolumeSpecName "kube-api-access-4p96n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.345220 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.345427 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.345610 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.346462 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.346608 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.347295 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6" (UID: "dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6"). InnerVolumeSpecName "v4-0-config-system-session". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.436751 4961 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.436827 4961 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.436842 4961 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.436853 4961 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.436862 4961 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.436871 4961 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.436879 4961 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.436901 4961 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-audit-dir\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.436912 4961 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.436925 4961 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.436937 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4p96n\" (UniqueName: \"kubernetes.io/projected/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-kube-api-access-4p96n\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.436946 4961 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.436954 4961 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.436963 4961 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6-audit-policies\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.842003 4961 generic.go:334] "Generic (PLEG): container finished" podID="dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6" containerID="2bec6009f0beecc0f250fe63c9f52737688cbbe1d03efe3aa0829c814f5412cd" exitCode=0 Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.842061 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" event={"ID":"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6","Type":"ContainerDied","Data":"2bec6009f0beecc0f250fe63c9f52737688cbbe1d03efe3aa0829c814f5412cd"} Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.842114 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" event={"ID":"dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6","Type":"ContainerDied","Data":"97ba11d768b63cc0a8d45cbf504687cdbd76cad95f205c652c74066255f0b63d"} Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.842140 4961 scope.go:117] "RemoveContainer" containerID="2bec6009f0beecc0f250fe63c9f52737688cbbe1d03efe3aa0829c814f5412cd" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.842171 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-f9gdg" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.842598 4961 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d176b9c3-755b-47b4-a7dd-a709873feaa8" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.842653 4961 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d176b9c3-755b-47b4-a7dd-a709873feaa8" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.848552 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.860280 4961 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="cb0dd0b6-b7f8-4f38-b79a-9dd297da1824" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.869305 4961 scope.go:117] "RemoveContainer" containerID="2bec6009f0beecc0f250fe63c9f52737688cbbe1d03efe3aa0829c814f5412cd" Dec 05 17:37:31 crc kubenswrapper[4961]: E1205 17:37:31.869842 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2bec6009f0beecc0f250fe63c9f52737688cbbe1d03efe3aa0829c814f5412cd\": container with ID starting with 2bec6009f0beecc0f250fe63c9f52737688cbbe1d03efe3aa0829c814f5412cd not found: ID does not exist" containerID="2bec6009f0beecc0f250fe63c9f52737688cbbe1d03efe3aa0829c814f5412cd" Dec 05 17:37:31 crc kubenswrapper[4961]: I1205 17:37:31.869905 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bec6009f0beecc0f250fe63c9f52737688cbbe1d03efe3aa0829c814f5412cd"} err="failed to get container status \"2bec6009f0beecc0f250fe63c9f52737688cbbe1d03efe3aa0829c814f5412cd\": rpc error: code = NotFound desc = could not find container \"2bec6009f0beecc0f250fe63c9f52737688cbbe1d03efe3aa0829c814f5412cd\": container with ID starting with 2bec6009f0beecc0f250fe63c9f52737688cbbe1d03efe3aa0829c814f5412cd not found: ID does not exist" Dec 05 17:37:32 crc kubenswrapper[4961]: I1205 17:37:32.853577 4961 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d176b9c3-755b-47b4-a7dd-a709873feaa8" Dec 05 17:37:32 crc kubenswrapper[4961]: I1205 17:37:32.853613 4961 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="d176b9c3-755b-47b4-a7dd-a709873feaa8" Dec 05 17:37:34 crc kubenswrapper[4961]: I1205 17:37:34.912357 4961 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="cb0dd0b6-b7f8-4f38-b79a-9dd297da1824" Dec 05 17:37:36 crc kubenswrapper[4961]: I1205 17:37:36.962852 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 17:37:37 crc kubenswrapper[4961]: I1205 17:37:37.108732 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Dec 05 17:37:37 crc kubenswrapper[4961]: I1205 17:37:37.225345 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Dec 05 17:37:37 crc kubenswrapper[4961]: I1205 17:37:37.339939 4961 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Dec 05 17:37:37 crc kubenswrapper[4961]: I1205 17:37:37.932920 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Dec 05 17:37:38 crc kubenswrapper[4961]: I1205 17:37:38.784266 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Dec 05 17:37:39 crc kubenswrapper[4961]: I1205 17:37:39.118653 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Dec 05 17:37:39 crc kubenswrapper[4961]: I1205 17:37:39.480619 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Dec 05 17:37:39 crc kubenswrapper[4961]: I1205 17:37:39.914548 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Dec 05 17:37:40 crc kubenswrapper[4961]: I1205 17:37:40.891697 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Dec 05 17:37:40 crc kubenswrapper[4961]: I1205 17:37:40.893893 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Dec 05 17:37:40 crc kubenswrapper[4961]: I1205 17:37:40.965662 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Dec 05 17:37:41 crc kubenswrapper[4961]: I1205 17:37:41.668539 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Dec 05 17:37:42 crc kubenswrapper[4961]: I1205 17:37:42.049419 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Dec 05 17:37:42 crc kubenswrapper[4961]: I1205 17:37:42.087926 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Dec 05 17:37:42 crc kubenswrapper[4961]: I1205 17:37:42.136977 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Dec 05 17:37:42 crc kubenswrapper[4961]: I1205 17:37:42.219435 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 17:37:42 crc kubenswrapper[4961]: I1205 17:37:42.359390 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Dec 05 17:37:42 crc kubenswrapper[4961]: I1205 17:37:42.572839 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Dec 05 17:37:42 crc kubenswrapper[4961]: I1205 17:37:42.650239 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Dec 05 17:37:42 crc kubenswrapper[4961]: I1205 17:37:42.930743 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Dec 05 17:37:43 crc kubenswrapper[4961]: I1205 17:37:43.048420 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Dec 05 17:37:43 crc kubenswrapper[4961]: I1205 
17:37:43.139282 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Dec 05 17:37:43 crc kubenswrapper[4961]: I1205 17:37:43.227416 4961 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Dec 05 17:37:43 crc kubenswrapper[4961]: I1205 17:37:43.349149 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Dec 05 17:37:43 crc kubenswrapper[4961]: I1205 17:37:43.359719 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Dec 05 17:37:43 crc kubenswrapper[4961]: I1205 17:37:43.663874 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Dec 05 17:37:43 crc kubenswrapper[4961]: I1205 17:37:43.831429 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Dec 05 17:37:43 crc kubenswrapper[4961]: I1205 17:37:43.983209 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 17:37:44 crc kubenswrapper[4961]: I1205 17:37:44.308913 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Dec 05 17:37:44 crc kubenswrapper[4961]: I1205 17:37:44.315027 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Dec 05 17:37:44 crc kubenswrapper[4961]: I1205 17:37:44.389220 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Dec 05 17:37:44 crc kubenswrapper[4961]: I1205 17:37:44.847736 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Dec 05 17:37:44 crc kubenswrapper[4961]: I1205 17:37:44.891105 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Dec 05 17:37:45 crc kubenswrapper[4961]: I1205 17:37:45.147391 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Dec 05 17:37:45 crc kubenswrapper[4961]: I1205 17:37:45.215844 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Dec 05 17:37:45 crc kubenswrapper[4961]: I1205 17:37:45.294613 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Dec 05 17:37:45 crc kubenswrapper[4961]: I1205 17:37:45.333542 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Dec 05 17:37:45 crc kubenswrapper[4961]: I1205 17:37:45.419720 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Dec 05 17:37:45 crc kubenswrapper[4961]: I1205 17:37:45.432013 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Dec 05 17:37:45 crc kubenswrapper[4961]: I1205 17:37:45.457800 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Dec 
05 17:37:45 crc kubenswrapper[4961]: I1205 17:37:45.575338 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Dec 05 17:37:45 crc kubenswrapper[4961]: I1205 17:37:45.607729 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Dec 05 17:37:45 crc kubenswrapper[4961]: I1205 17:37:45.634093 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Dec 05 17:37:45 crc kubenswrapper[4961]: I1205 17:37:45.780109 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Dec 05 17:37:45 crc kubenswrapper[4961]: I1205 17:37:45.786832 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Dec 05 17:37:45 crc kubenswrapper[4961]: I1205 17:37:45.810278 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 17:37:45 crc kubenswrapper[4961]: I1205 17:37:45.917751 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Dec 05 17:37:46 crc kubenswrapper[4961]: I1205 17:37:46.112232 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Dec 05 17:37:46 crc kubenswrapper[4961]: I1205 17:37:46.158517 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Dec 05 17:37:46 crc kubenswrapper[4961]: I1205 17:37:46.210475 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Dec 05 17:37:46 crc kubenswrapper[4961]: I1205 17:37:46.267530 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 17:37:46 crc kubenswrapper[4961]: I1205 17:37:46.289910 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Dec 05 17:37:46 crc kubenswrapper[4961]: I1205 17:37:46.658646 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Dec 05 17:37:46 crc kubenswrapper[4961]: I1205 17:37:46.769079 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Dec 05 17:37:46 crc kubenswrapper[4961]: I1205 17:37:46.779763 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Dec 05 17:37:46 crc kubenswrapper[4961]: I1205 17:37:46.908227 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Dec 05 17:37:46 crc kubenswrapper[4961]: I1205 17:37:46.935074 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Dec 05 17:37:46 crc kubenswrapper[4961]: I1205 17:37:46.949033 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Dec 05 17:37:46 crc kubenswrapper[4961]: I1205 17:37:46.993014 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Dec 05 17:37:47 crc kubenswrapper[4961]: I1205 
17:37:47.176327 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Dec 05 17:37:47 crc kubenswrapper[4961]: I1205 17:37:47.205446 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Dec 05 17:37:47 crc kubenswrapper[4961]: I1205 17:37:47.285293 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Dec 05 17:37:47 crc kubenswrapper[4961]: I1205 17:37:47.320466 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Dec 05 17:37:47 crc kubenswrapper[4961]: I1205 17:37:47.353747 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Dec 05 17:37:47 crc kubenswrapper[4961]: I1205 17:37:47.359506 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Dec 05 17:37:47 crc kubenswrapper[4961]: I1205 17:37:47.460924 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Dec 05 17:37:47 crc kubenswrapper[4961]: I1205 17:37:47.623518 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Dec 05 17:37:47 crc kubenswrapper[4961]: I1205 17:37:47.674530 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Dec 05 17:37:47 crc kubenswrapper[4961]: I1205 17:37:47.699673 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Dec 05 17:37:47 crc kubenswrapper[4961]: I1205 17:37:47.783081 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Dec 05 17:37:47 crc kubenswrapper[4961]: I1205 17:37:47.787043 4961 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Dec 05 17:37:47 crc kubenswrapper[4961]: I1205 17:37:47.789798 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=36.789765009999996 podStartE2EDuration="36.78976501s" podCreationTimestamp="2025-12-05 17:37:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:37:30.47774848 +0000 UTC m=+256.538898943" watchObservedRunningTime="2025-12-05 17:37:47.78976501 +0000 UTC m=+273.850915483" Dec 05 17:37:47 crc kubenswrapper[4961]: I1205 17:37:47.791066 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-f9gdg","openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 17:37:47 crc kubenswrapper[4961]: I1205 17:37:47.791117 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Dec 05 17:37:47 crc kubenswrapper[4961]: I1205 17:37:47.794960 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Dec 05 17:37:47 crc kubenswrapper[4961]: I1205 17:37:47.810224 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" 
podStartSLOduration=17.810198226 podStartE2EDuration="17.810198226s" podCreationTimestamp="2025-12-05 17:37:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:37:47.806807138 +0000 UTC m=+273.867957651" watchObservedRunningTime="2025-12-05 17:37:47.810198226 +0000 UTC m=+273.871348699" Dec 05 17:37:47 crc kubenswrapper[4961]: I1205 17:37:47.857394 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Dec 05 17:37:47 crc kubenswrapper[4961]: I1205 17:37:47.889585 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Dec 05 17:37:47 crc kubenswrapper[4961]: I1205 17:37:47.919254 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Dec 05 17:37:47 crc kubenswrapper[4961]: I1205 17:37:47.932093 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Dec 05 17:37:48 crc kubenswrapper[4961]: I1205 17:37:48.039891 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Dec 05 17:37:48 crc kubenswrapper[4961]: I1205 17:37:48.128511 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Dec 05 17:37:48 crc kubenswrapper[4961]: I1205 17:37:48.215528 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Dec 05 17:37:48 crc kubenswrapper[4961]: I1205 17:37:48.231604 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Dec 05 17:37:48 crc kubenswrapper[4961]: I1205 17:37:48.374714 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Dec 05 17:37:48 crc kubenswrapper[4961]: I1205 17:37:48.377303 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Dec 05 17:37:48 crc kubenswrapper[4961]: I1205 17:37:48.454626 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Dec 05 17:37:48 crc kubenswrapper[4961]: I1205 17:37:48.468912 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Dec 05 17:37:48 crc kubenswrapper[4961]: I1205 17:37:48.490455 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Dec 05 17:37:48 crc kubenswrapper[4961]: I1205 17:37:48.520420 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Dec 05 17:37:48 crc kubenswrapper[4961]: I1205 17:37:48.553536 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Dec 05 17:37:48 crc kubenswrapper[4961]: I1205 17:37:48.579503 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Dec 05 17:37:48 crc kubenswrapper[4961]: I1205 17:37:48.610969 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Dec 05 17:37:48 crc 
kubenswrapper[4961]: I1205 17:37:48.623336 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Dec 05 17:37:48 crc kubenswrapper[4961]: I1205 17:37:48.702516 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Dec 05 17:37:48 crc kubenswrapper[4961]: I1205 17:37:48.872255 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6" path="/var/lib/kubelet/pods/dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6/volumes" Dec 05 17:37:48 crc kubenswrapper[4961]: I1205 17:37:48.933755 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Dec 05 17:37:49 crc kubenswrapper[4961]: I1205 17:37:49.000461 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Dec 05 17:37:49 crc kubenswrapper[4961]: I1205 17:37:49.010712 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Dec 05 17:37:49 crc kubenswrapper[4961]: I1205 17:37:49.024409 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Dec 05 17:37:49 crc kubenswrapper[4961]: I1205 17:37:49.059423 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Dec 05 17:37:49 crc kubenswrapper[4961]: I1205 17:37:49.098848 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Dec 05 17:37:49 crc kubenswrapper[4961]: I1205 17:37:49.112652 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Dec 05 17:37:49 crc kubenswrapper[4961]: I1205 17:37:49.296209 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Dec 05 17:37:49 crc kubenswrapper[4961]: I1205 17:37:49.336894 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Dec 05 17:37:49 crc kubenswrapper[4961]: I1205 17:37:49.339799 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Dec 05 17:37:49 crc kubenswrapper[4961]: I1205 17:37:49.349062 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 17:37:49 crc kubenswrapper[4961]: I1205 17:37:49.446876 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Dec 05 17:37:49 crc kubenswrapper[4961]: I1205 17:37:49.452406 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Dec 05 17:37:49 crc kubenswrapper[4961]: I1205 17:37:49.487639 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Dec 05 17:37:49 crc kubenswrapper[4961]: I1205 17:37:49.647669 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Dec 05 17:37:49 crc kubenswrapper[4961]: 
I1205 17:37:49.680267 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Dec 05 17:37:49 crc kubenswrapper[4961]: I1205 17:37:49.726351 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 17:37:49 crc kubenswrapper[4961]: I1205 17:37:49.789850 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Dec 05 17:37:49 crc kubenswrapper[4961]: I1205 17:37:49.843215 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Dec 05 17:37:49 crc kubenswrapper[4961]: I1205 17:37:49.887588 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Dec 05 17:37:50 crc kubenswrapper[4961]: I1205 17:37:50.000590 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Dec 05 17:37:50 crc kubenswrapper[4961]: I1205 17:37:50.444417 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Dec 05 17:37:50 crc kubenswrapper[4961]: I1205 17:37:50.643208 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Dec 05 17:37:50 crc kubenswrapper[4961]: I1205 17:37:50.651685 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Dec 05 17:37:50 crc kubenswrapper[4961]: I1205 17:37:50.658686 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Dec 05 17:37:50 crc kubenswrapper[4961]: I1205 17:37:50.661085 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 17:37:50 crc kubenswrapper[4961]: I1205 17:37:50.708761 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Dec 05 17:37:50 crc kubenswrapper[4961]: I1205 17:37:50.734233 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Dec 05 17:37:50 crc kubenswrapper[4961]: I1205 17:37:50.747585 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Dec 05 17:37:50 crc kubenswrapper[4961]: I1205 17:37:50.773509 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Dec 05 17:37:50 crc kubenswrapper[4961]: I1205 17:37:50.827167 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Dec 05 17:37:50 crc kubenswrapper[4961]: I1205 17:37:50.899104 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Dec 05 17:37:50 crc kubenswrapper[4961]: I1205 17:37:50.909957 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Dec 05 17:37:51 crc kubenswrapper[4961]: I1205 17:37:51.063683 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 
17:37:51 crc kubenswrapper[4961]: I1205 17:37:51.340676 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Dec 05 17:37:51 crc kubenswrapper[4961]: I1205 17:37:51.456091 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Dec 05 17:37:51 crc kubenswrapper[4961]: I1205 17:37:51.510069 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Dec 05 17:37:51 crc kubenswrapper[4961]: I1205 17:37:51.513484 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Dec 05 17:37:51 crc kubenswrapper[4961]: I1205 17:37:51.613166 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Dec 05 17:37:51 crc kubenswrapper[4961]: I1205 17:37:51.736094 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Dec 05 17:37:51 crc kubenswrapper[4961]: I1205 17:37:51.753865 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Dec 05 17:37:51 crc kubenswrapper[4961]: I1205 17:37:51.781242 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Dec 05 17:37:51 crc kubenswrapper[4961]: I1205 17:37:51.783320 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Dec 05 17:37:51 crc kubenswrapper[4961]: I1205 17:37:51.821359 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Dec 05 17:37:51 crc kubenswrapper[4961]: I1205 17:37:51.846683 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Dec 05 17:37:51 crc kubenswrapper[4961]: I1205 17:37:51.897156 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Dec 05 17:37:51 crc kubenswrapper[4961]: I1205 17:37:51.951926 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Dec 05 17:37:52 crc kubenswrapper[4961]: I1205 17:37:52.158503 4961 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Dec 05 17:37:52 crc kubenswrapper[4961]: I1205 17:37:52.162600 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Dec 05 17:37:52 crc kubenswrapper[4961]: I1205 17:37:52.182821 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Dec 05 17:37:52 crc kubenswrapper[4961]: I1205 17:37:52.320947 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Dec 05 17:37:52 crc kubenswrapper[4961]: I1205 17:37:52.373191 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 17:37:52 crc kubenswrapper[4961]: I1205 17:37:52.377627 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" 
Dec 05 17:37:52 crc kubenswrapper[4961]: I1205 17:37:52.470021 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Dec 05 17:37:52 crc kubenswrapper[4961]: I1205 17:37:52.633960 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Dec 05 17:37:52 crc kubenswrapper[4961]: I1205 17:37:52.644910 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Dec 05 17:37:52 crc kubenswrapper[4961]: I1205 17:37:52.669250 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Dec 05 17:37:52 crc kubenswrapper[4961]: I1205 17:37:52.837823 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Dec 05 17:37:52 crc kubenswrapper[4961]: I1205 17:37:52.938272 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Dec 05 17:37:52 crc kubenswrapper[4961]: I1205 17:37:52.974129 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Dec 05 17:37:52 crc kubenswrapper[4961]: I1205 17:37:52.985195 4961 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Dec 05 17:37:53 crc kubenswrapper[4961]: I1205 17:37:53.042991 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Dec 05 17:37:53 crc kubenswrapper[4961]: I1205 17:37:53.053158 4961 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 17:37:53 crc kubenswrapper[4961]: I1205 17:37:53.053378 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://2df59cecd9b5083f8bf4290fe0d92a35b3fc3113597695e33aa71d9ff290995f" gracePeriod=5 Dec 05 17:37:53 crc kubenswrapper[4961]: I1205 17:37:53.085162 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Dec 05 17:37:53 crc kubenswrapper[4961]: I1205 17:37:53.135195 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Dec 05 17:37:53 crc kubenswrapper[4961]: I1205 17:37:53.150332 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Dec 05 17:37:53 crc kubenswrapper[4961]: I1205 17:37:53.158409 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Dec 05 17:37:53 crc kubenswrapper[4961]: I1205 17:37:53.181212 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Dec 05 17:37:53 crc kubenswrapper[4961]: I1205 17:37:53.195121 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Dec 05 17:37:53 crc kubenswrapper[4961]: I1205 17:37:53.283706 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Dec 05 17:37:53 crc 
kubenswrapper[4961]: I1205 17:37:53.339828 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Dec 05 17:37:53 crc kubenswrapper[4961]: I1205 17:37:53.413708 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Dec 05 17:37:53 crc kubenswrapper[4961]: I1205 17:37:53.483342 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 17:37:53 crc kubenswrapper[4961]: I1205 17:37:53.550395 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Dec 05 17:37:53 crc kubenswrapper[4961]: I1205 17:37:53.569207 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 17:37:53 crc kubenswrapper[4961]: I1205 17:37:53.635428 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Dec 05 17:37:53 crc kubenswrapper[4961]: I1205 17:37:53.651417 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Dec 05 17:37:53 crc kubenswrapper[4961]: I1205 17:37:53.848551 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Dec 05 17:37:53 crc kubenswrapper[4961]: I1205 17:37:53.849534 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Dec 05 17:37:53 crc kubenswrapper[4961]: I1205 17:37:53.889265 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Dec 05 17:37:53 crc kubenswrapper[4961]: I1205 17:37:53.924394 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Dec 05 17:37:53 crc kubenswrapper[4961]: I1205 17:37:53.948967 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 17:37:53 crc kubenswrapper[4961]: I1205 17:37:53.985509 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Dec 05 17:37:54 crc kubenswrapper[4961]: I1205 17:37:54.029169 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Dec 05 17:37:54 crc kubenswrapper[4961]: I1205 17:37:54.089996 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Dec 05 17:37:54 crc kubenswrapper[4961]: I1205 17:37:54.241704 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Dec 05 17:37:54 crc kubenswrapper[4961]: I1205 17:37:54.289159 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Dec 05 17:37:54 crc kubenswrapper[4961]: I1205 17:37:54.321685 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 17:37:54 crc kubenswrapper[4961]: I1205 17:37:54.344084 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Dec 05 17:37:54 crc 
kubenswrapper[4961]: I1205 17:37:54.544920 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Dec 05 17:37:54 crc kubenswrapper[4961]: I1205 17:37:54.674092 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Dec 05 17:37:54 crc kubenswrapper[4961]: I1205 17:37:54.736858 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Dec 05 17:37:54 crc kubenswrapper[4961]: I1205 17:37:54.763552 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Dec 05 17:37:54 crc kubenswrapper[4961]: I1205 17:37:54.859701 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Dec 05 17:37:54 crc kubenswrapper[4961]: I1205 17:37:54.938368 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Dec 05 17:37:54 crc kubenswrapper[4961]: I1205 17:37:54.948235 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Dec 05 17:37:54 crc kubenswrapper[4961]: I1205 17:37:54.976115 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Dec 05 17:37:55 crc kubenswrapper[4961]: I1205 17:37:55.027409 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 17:37:55 crc kubenswrapper[4961]: I1205 17:37:55.046866 4961 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Dec 05 17:37:55 crc kubenswrapper[4961]: I1205 17:37:55.111406 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Dec 05 17:37:55 crc kubenswrapper[4961]: I1205 17:37:55.111406 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Dec 05 17:37:55 crc kubenswrapper[4961]: I1205 17:37:55.121939 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Dec 05 17:37:55 crc kubenswrapper[4961]: I1205 17:37:55.179642 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Dec 05 17:37:55 crc kubenswrapper[4961]: I1205 17:37:55.291229 4961 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Dec 05 17:37:55 crc kubenswrapper[4961]: I1205 17:37:55.409397 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Dec 05 17:37:55 crc kubenswrapper[4961]: I1205 17:37:55.468000 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Dec 05 17:37:55 crc kubenswrapper[4961]: I1205 17:37:55.552105 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Dec 05 17:37:55 crc kubenswrapper[4961]: I1205 17:37:55.576667 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Dec 05 17:37:55 crc kubenswrapper[4961]: I1205 17:37:55.878640 4961 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Dec 05 17:37:55 crc kubenswrapper[4961]: I1205 17:37:55.999093 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Dec 05 17:37:56 crc kubenswrapper[4961]: I1205 17:37:56.089712 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Dec 05 17:37:56 crc kubenswrapper[4961]: I1205 17:37:56.227166 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Dec 05 17:37:56 crc kubenswrapper[4961]: I1205 17:37:56.244562 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Dec 05 17:37:56 crc kubenswrapper[4961]: I1205 17:37:56.257595 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Dec 05 17:37:56 crc kubenswrapper[4961]: I1205 17:37:56.280204 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Dec 05 17:37:56 crc kubenswrapper[4961]: I1205 17:37:56.346298 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Dec 05 17:37:56 crc kubenswrapper[4961]: I1205 17:37:56.352744 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Dec 05 17:37:56 crc kubenswrapper[4961]: I1205 17:37:56.483905 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Dec 05 17:37:56 crc kubenswrapper[4961]: I1205 17:37:56.533600 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Dec 05 17:37:56 crc kubenswrapper[4961]: I1205 17:37:56.658464 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Dec 05 17:37:56 crc kubenswrapper[4961]: I1205 17:37:56.752319 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 17:37:56 crc kubenswrapper[4961]: I1205 17:37:56.753469 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Dec 05 17:37:56 crc kubenswrapper[4961]: I1205 17:37:56.792477 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Dec 05 17:37:56 crc kubenswrapper[4961]: I1205 17:37:56.856953 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Dec 05 17:37:57 crc kubenswrapper[4961]: I1205 17:37:57.141133 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Dec 05 17:37:57 crc kubenswrapper[4961]: I1205 17:37:57.236381 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Dec 05 17:37:57 crc kubenswrapper[4961]: I1205 17:37:57.271639 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Dec 05 17:37:57 crc 
kubenswrapper[4961]: I1205 17:37:57.555867 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Dec 05 17:37:57 crc kubenswrapper[4961]: I1205 17:37:57.569977 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Dec 05 17:37:57 crc kubenswrapper[4961]: I1205 17:37:57.616111 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Dec 05 17:37:57 crc kubenswrapper[4961]: I1205 17:37:57.620348 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Dec 05 17:37:57 crc kubenswrapper[4961]: I1205 17:37:57.633570 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Dec 05 17:37:57 crc kubenswrapper[4961]: I1205 17:37:57.684937 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Dec 05 17:37:57 crc kubenswrapper[4961]: I1205 17:37:57.695629 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Dec 05 17:37:57 crc kubenswrapper[4961]: I1205 17:37:57.773915 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Dec 05 17:37:57 crc kubenswrapper[4961]: I1205 17:37:57.992401 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 17:37:58 crc kubenswrapper[4961]: I1205 17:37:58.033637 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Dec 05 17:37:58 crc kubenswrapper[4961]: I1205 17:37:58.465669 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Dec 05 17:37:58 crc kubenswrapper[4961]: I1205 17:37:58.531454 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Dec 05 17:37:58 crc kubenswrapper[4961]: I1205 17:37:58.624037 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 05 17:37:58 crc kubenswrapper[4961]: I1205 17:37:58.624137 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:37:58 crc kubenswrapper[4961]: I1205 17:37:58.679594 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 17:37:58 crc kubenswrapper[4961]: I1205 17:37:58.679721 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 17:37:58 crc kubenswrapper[4961]: I1205 17:37:58.679759 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 17:37:58 crc kubenswrapper[4961]: I1205 17:37:58.679814 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 17:37:58 crc kubenswrapper[4961]: I1205 17:37:58.679838 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Dec 05 17:37:58 crc kubenswrapper[4961]: I1205 17:37:58.680119 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:37:58 crc kubenswrapper[4961]: I1205 17:37:58.680157 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:37:58 crc kubenswrapper[4961]: I1205 17:37:58.685858 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:37:58 crc kubenswrapper[4961]: I1205 17:37:58.685925 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:37:58 crc kubenswrapper[4961]: I1205 17:37:58.688444 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:37:58 crc kubenswrapper[4961]: I1205 17:37:58.781023 4961 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:58 crc kubenswrapper[4961]: I1205 17:37:58.781058 4961 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:58 crc kubenswrapper[4961]: I1205 17:37:58.781068 4961 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:58 crc kubenswrapper[4961]: I1205 17:37:58.781078 4961 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:58 crc kubenswrapper[4961]: I1205 17:37:58.781087 4961 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Dec 05 17:37:58 crc kubenswrapper[4961]: I1205 17:37:58.869956 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Dec 05 17:37:58 crc kubenswrapper[4961]: I1205 17:37:58.870446 4961 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Dec 05 17:37:58 crc kubenswrapper[4961]: I1205 17:37:58.882154 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 17:37:58 crc kubenswrapper[4961]: I1205 17:37:58.882207 4961 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="10c4c033-4dd1-4fa0-953e-0a58762750e0" Dec 05 17:37:58 crc kubenswrapper[4961]: I1205 17:37:58.888009 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Dec 05 17:37:58 crc kubenswrapper[4961]: I1205 17:37:58.888356 4961 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="10c4c033-4dd1-4fa0-953e-0a58762750e0" Dec 05 17:37:59 crc kubenswrapper[4961]: I1205 17:37:59.000885 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Dec 05 17:37:59 crc kubenswrapper[4961]: I1205 17:37:59.000936 4961 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" 
containerID="2df59cecd9b5083f8bf4290fe0d92a35b3fc3113597695e33aa71d9ff290995f" exitCode=137 Dec 05 17:37:59 crc kubenswrapper[4961]: I1205 17:37:59.000979 4961 scope.go:117] "RemoveContainer" containerID="2df59cecd9b5083f8bf4290fe0d92a35b3fc3113597695e33aa71d9ff290995f" Dec 05 17:37:59 crc kubenswrapper[4961]: I1205 17:37:59.001000 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Dec 05 17:37:59 crc kubenswrapper[4961]: I1205 17:37:59.016688 4961 scope.go:117] "RemoveContainer" containerID="2df59cecd9b5083f8bf4290fe0d92a35b3fc3113597695e33aa71d9ff290995f" Dec 05 17:37:59 crc kubenswrapper[4961]: E1205 17:37:59.017074 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2df59cecd9b5083f8bf4290fe0d92a35b3fc3113597695e33aa71d9ff290995f\": container with ID starting with 2df59cecd9b5083f8bf4290fe0d92a35b3fc3113597695e33aa71d9ff290995f not found: ID does not exist" containerID="2df59cecd9b5083f8bf4290fe0d92a35b3fc3113597695e33aa71d9ff290995f" Dec 05 17:37:59 crc kubenswrapper[4961]: I1205 17:37:59.017101 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2df59cecd9b5083f8bf4290fe0d92a35b3fc3113597695e33aa71d9ff290995f"} err="failed to get container status \"2df59cecd9b5083f8bf4290fe0d92a35b3fc3113597695e33aa71d9ff290995f\": rpc error: code = NotFound desc = could not find container \"2df59cecd9b5083f8bf4290fe0d92a35b3fc3113597695e33aa71d9ff290995f\": container with ID starting with 2df59cecd9b5083f8bf4290fe0d92a35b3fc3113597695e33aa71d9ff290995f not found: ID does not exist" Dec 05 17:37:59 crc kubenswrapper[4961]: I1205 17:37:59.120947 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Dec 05 17:37:59 crc kubenswrapper[4961]: I1205 17:37:59.323327 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Dec 05 17:37:59 crc kubenswrapper[4961]: I1205 17:37:59.328530 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Dec 05 17:37:59 crc kubenswrapper[4961]: I1205 17:37:59.588077 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Dec 05 17:37:59 crc kubenswrapper[4961]: I1205 17:37:59.606558 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Dec 05 17:37:59 crc kubenswrapper[4961]: I1205 17:37:59.696902 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Dec 05 17:38:00 crc kubenswrapper[4961]: I1205 17:38:00.413347 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Dec 05 17:38:00 crc kubenswrapper[4961]: I1205 17:38:00.917093 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Dec 05 17:38:00 crc kubenswrapper[4961]: I1205 17:38:00.930909 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-6f67d677dd-fw96v"] Dec 05 17:38:00 crc kubenswrapper[4961]: E1205 17:38:00.931180 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="db9da7f5-eb95-470f-8f02-55ad79822cb5" 
containerName="installer" Dec 05 17:38:00 crc kubenswrapper[4961]: I1205 17:38:00.931201 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="db9da7f5-eb95-470f-8f02-55ad79822cb5" containerName="installer" Dec 05 17:38:00 crc kubenswrapper[4961]: E1205 17:38:00.931211 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 05 17:38:00 crc kubenswrapper[4961]: I1205 17:38:00.931218 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 05 17:38:00 crc kubenswrapper[4961]: E1205 17:38:00.931230 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6" containerName="oauth-openshift" Dec 05 17:38:00 crc kubenswrapper[4961]: I1205 17:38:00.931236 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6" containerName="oauth-openshift" Dec 05 17:38:00 crc kubenswrapper[4961]: I1205 17:38:00.931344 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd3acfc8-817d-499f-b2c9-d9e3d1c9d7c6" containerName="oauth-openshift" Dec 05 17:38:00 crc kubenswrapper[4961]: I1205 17:38:00.931356 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="db9da7f5-eb95-470f-8f02-55ad79822cb5" containerName="installer" Dec 05 17:38:00 crc kubenswrapper[4961]: I1205 17:38:00.931365 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Dec 05 17:38:00 crc kubenswrapper[4961]: I1205 17:38:00.931766 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:00 crc kubenswrapper[4961]: I1205 17:38:00.935430 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Dec 05 17:38:00 crc kubenswrapper[4961]: I1205 17:38:00.936038 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Dec 05 17:38:00 crc kubenswrapper[4961]: I1205 17:38:00.938888 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Dec 05 17:38:00 crc kubenswrapper[4961]: I1205 17:38:00.938920 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Dec 05 17:38:00 crc kubenswrapper[4961]: I1205 17:38:00.938924 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Dec 05 17:38:00 crc kubenswrapper[4961]: I1205 17:38:00.938944 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Dec 05 17:38:00 crc kubenswrapper[4961]: I1205 17:38:00.938965 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Dec 05 17:38:00 crc kubenswrapper[4961]: I1205 17:38:00.939045 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Dec 05 17:38:00 crc kubenswrapper[4961]: I1205 17:38:00.943265 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Dec 05 17:38:00 crc kubenswrapper[4961]: I1205 
17:38:00.947378 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Dec 05 17:38:00 crc kubenswrapper[4961]: I1205 17:38:00.948357 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Dec 05 17:38:00 crc kubenswrapper[4961]: I1205 17:38:00.949552 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Dec 05 17:38:00 crc kubenswrapper[4961]: I1205 17:38:00.956742 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6f67d677dd-fw96v"] Dec 05 17:38:00 crc kubenswrapper[4961]: I1205 17:38:00.957619 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Dec 05 17:38:00 crc kubenswrapper[4961]: I1205 17:38:00.962167 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Dec 05 17:38:00 crc kubenswrapper[4961]: I1205 17:38:00.966801 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.009282 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.009586 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-user-template-login\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.009692 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kzmbf\" (UniqueName: \"kubernetes.io/projected/292177cf-7519-4c32-8784-77d7af5b99bb-kube-api-access-kzmbf\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.009819 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.009920 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-system-router-certs\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " 
pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.010011 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-system-session\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.010103 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/292177cf-7519-4c32-8784-77d7af5b99bb-audit-dir\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.010177 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.010256 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.010367 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.010456 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.010566 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/292177cf-7519-4c32-8784-77d7af5b99bb-audit-policies\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.010670 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-user-template-error\") 
pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.010809 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-system-service-ca\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.112673 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-system-service-ca\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.112746 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.112798 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-user-template-login\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.112824 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kzmbf\" (UniqueName: \"kubernetes.io/projected/292177cf-7519-4c32-8784-77d7af5b99bb-kube-api-access-kzmbf\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.112843 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.112865 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-system-router-certs\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.112887 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: 
\"kubernetes.io/secret/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-system-session\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.112903 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/292177cf-7519-4c32-8784-77d7af5b99bb-audit-dir\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.112919 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.112940 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.112970 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.112991 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.113016 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/292177cf-7519-4c32-8784-77d7af5b99bb-audit-policies\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.113036 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-user-template-error\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.114697 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-system-service-ca\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.114995 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-system-cliconfig\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.115275 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/292177cf-7519-4c32-8784-77d7af5b99bb-audit-policies\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.115348 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/292177cf-7519-4c32-8784-77d7af5b99bb-audit-dir\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.116495 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.117695 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-user-template-error\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.117860 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-system-session\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.117957 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.118693 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-system-ocp-branding-template\") pod 
\"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.123148 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.123289 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-user-template-login\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.123536 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-system-router-certs\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.124415 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/292177cf-7519-4c32-8784-77d7af5b99bb-v4-0-config-system-serving-cert\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.129488 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kzmbf\" (UniqueName: \"kubernetes.io/projected/292177cf-7519-4c32-8784-77d7af5b99bb-kube-api-access-kzmbf\") pod \"oauth-openshift-6f67d677dd-fw96v\" (UID: \"292177cf-7519-4c32-8784-77d7af5b99bb\") " pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.246976 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.258327 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Dec 05 17:38:01 crc kubenswrapper[4961]: I1205 17:38:01.661600 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-6f67d677dd-fw96v"] Dec 05 17:38:02 crc kubenswrapper[4961]: I1205 17:38:02.022704 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" event={"ID":"292177cf-7519-4c32-8784-77d7af5b99bb","Type":"ContainerStarted","Data":"8b6526cebf8dddfc12053d20656fe172a1d635ec6928c10af43222f968e2e006"} Dec 05 17:38:02 crc kubenswrapper[4961]: I1205 17:38:02.022769 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" event={"ID":"292177cf-7519-4c32-8784-77d7af5b99bb","Type":"ContainerStarted","Data":"c8b4da804535de9c866107364cd71ff19116b73af2388dd3043ff52b84a13c38"} Dec 05 17:38:02 crc kubenswrapper[4961]: I1205 17:38:02.023077 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:02 crc kubenswrapper[4961]: I1205 17:38:02.047844 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" podStartSLOduration=57.047761939 podStartE2EDuration="57.047761939s" podCreationTimestamp="2025-12-05 17:37:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:38:02.040863052 +0000 UTC m=+288.102013535" watchObservedRunningTime="2025-12-05 17:38:02.047761939 +0000 UTC m=+288.108912482" Dec 05 17:38:02 crc kubenswrapper[4961]: I1205 17:38:02.110210 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-6f67d677dd-fw96v" Dec 05 17:38:12 crc kubenswrapper[4961]: I1205 17:38:12.954304 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7wrmp"] Dec 05 17:38:12 crc kubenswrapper[4961]: I1205 17:38:12.955065 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-7wrmp" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" containerName="registry-server" containerID="cri-o://6afc82d7da0f8598cc8dfb344fe854f19f767594aeb257ef05d225777babaac9" gracePeriod=30 Dec 05 17:38:12 crc kubenswrapper[4961]: I1205 17:38:12.958016 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c5wk7"] Dec 05 17:38:12 crc kubenswrapper[4961]: I1205 17:38:12.958358 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-c5wk7" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" containerName="registry-server" containerID="cri-o://39c08015361db10f9eb9157a42e13952456652ef0a34141c20ec0420471e72b6" gracePeriod=30 Dec 05 17:38:12 crc kubenswrapper[4961]: I1205 17:38:12.971628 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hdlff"] Dec 05 17:38:12 crc kubenswrapper[4961]: I1205 17:38:12.977468 4961 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/community-operators-hdlff" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" containerName="registry-server" containerID="cri-o://f45d8913d22c5cf2feaf37342b571d9c407a87e665527de862aef9bbf04201b3" gracePeriod=30 Dec 05 17:38:12 crc kubenswrapper[4961]: I1205 17:38:12.977516 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kgvwm"] Dec 05 17:38:12 crc kubenswrapper[4961]: I1205 17:38:12.977934 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-kgvwm" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" containerName="registry-server" containerID="cri-o://fdada913005f0ed962a8aae89c22d907ee8248e0e740ec990da25b884cdc7b6d" gracePeriod=30 Dec 05 17:38:12 crc kubenswrapper[4961]: I1205 17:38:12.990898 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-c4krp"] Dec 05 17:38:12 crc kubenswrapper[4961]: I1205 17:38:12.991165 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-c4krp" podUID="3733b15a-cd45-418e-9452-79a33535ce35" containerName="marketplace-operator" containerID="cri-o://3e79ab8c02f5039d8fb9f1d8d54f3135d72ab16ace4041ebc8586eba0e199646" gracePeriod=30 Dec 05 17:38:13 crc kubenswrapper[4961]: E1205 17:38:13.003288 4961 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="fdada913005f0ed962a8aae89c22d907ee8248e0e740ec990da25b884cdc7b6d" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 17:38:13 crc kubenswrapper[4961]: E1205 17:38:13.007889 4961 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="fdada913005f0ed962a8aae89c22d907ee8248e0e740ec990da25b884cdc7b6d" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.007965 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2djc5"] Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.008282 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-2djc5" podUID="39220eba-93d5-4e82-89a9-97d1383522a4" containerName="registry-server" containerID="cri-o://c834e2400f8ee4f4d3dd207d99068eca1f2bddb016239a33514fc3a0b746e3c0" gracePeriod=30 Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.011597 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2vgjf"] Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.011884 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-2vgjf" podUID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" containerName="registry-server" containerID="cri-o://f330a359f0dcf055cab81f830b377e95cde2afa6dff7c9bd2bdb3267afadceca" gracePeriod=30 Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.017176 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-kxwjj"] Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.018114 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-kxwjj" Dec 05 17:38:13 crc kubenswrapper[4961]: E1205 17:38:13.021946 4961 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="fdada913005f0ed962a8aae89c22d907ee8248e0e740ec990da25b884cdc7b6d" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 17:38:13 crc kubenswrapper[4961]: E1205 17:38:13.022014 4961 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-marketplace/community-operators-kgvwm" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" containerName="registry-server" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.027041 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4rgn2"] Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.027337 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-4rgn2" podUID="81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" containerName="registry-server" containerID="cri-o://b89276b287c0ee766626cc9dd93bf00997a01a5a639953c364d6990df5417f76" gracePeriod=30 Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.033843 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-kxwjj"] Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.077006 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bb5a393d-4029-4474-937c-3ddf348254f2-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-kxwjj\" (UID: \"bb5a393d-4029-4474-937c-3ddf348254f2\") " pod="openshift-marketplace/marketplace-operator-79b997595-kxwjj" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.077106 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4lsjd\" (UniqueName: \"kubernetes.io/projected/bb5a393d-4029-4474-937c-3ddf348254f2-kube-api-access-4lsjd\") pod \"marketplace-operator-79b997595-kxwjj\" (UID: \"bb5a393d-4029-4474-937c-3ddf348254f2\") " pod="openshift-marketplace/marketplace-operator-79b997595-kxwjj" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.077252 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/bb5a393d-4029-4474-937c-3ddf348254f2-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-kxwjj\" (UID: \"bb5a393d-4029-4474-937c-3ddf348254f2\") " pod="openshift-marketplace/marketplace-operator-79b997595-kxwjj" Dec 05 17:38:13 crc kubenswrapper[4961]: E1205 17:38:13.115672 4961 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6afc82d7da0f8598cc8dfb344fe854f19f767594aeb257ef05d225777babaac9 is running failed: container process not found" containerID="6afc82d7da0f8598cc8dfb344fe854f19f767594aeb257ef05d225777babaac9" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 17:38:13 crc kubenswrapper[4961]: E1205 17:38:13.116192 4961 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound 
desc = container is not created or running: checking if PID of 6afc82d7da0f8598cc8dfb344fe854f19f767594aeb257ef05d225777babaac9 is running failed: container process not found" containerID="6afc82d7da0f8598cc8dfb344fe854f19f767594aeb257ef05d225777babaac9" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 17:38:13 crc kubenswrapper[4961]: E1205 17:38:13.116860 4961 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6afc82d7da0f8598cc8dfb344fe854f19f767594aeb257ef05d225777babaac9 is running failed: container process not found" containerID="6afc82d7da0f8598cc8dfb344fe854f19f767594aeb257ef05d225777babaac9" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 17:38:13 crc kubenswrapper[4961]: E1205 17:38:13.116938 4961 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 6afc82d7da0f8598cc8dfb344fe854f19f767594aeb257ef05d225777babaac9 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-7wrmp" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" containerName="registry-server" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.119340 4961 generic.go:334] "Generic (PLEG): container finished" podID="3733b15a-cd45-418e-9452-79a33535ce35" containerID="3e79ab8c02f5039d8fb9f1d8d54f3135d72ab16ace4041ebc8586eba0e199646" exitCode=0 Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.119429 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-c4krp" event={"ID":"3733b15a-cd45-418e-9452-79a33535ce35","Type":"ContainerDied","Data":"3e79ab8c02f5039d8fb9f1d8d54f3135d72ab16ace4041ebc8586eba0e199646"} Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.122501 4961 generic.go:334] "Generic (PLEG): container finished" podID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" containerID="f45d8913d22c5cf2feaf37342b571d9c407a87e665527de862aef9bbf04201b3" exitCode=0 Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.122570 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hdlff" event={"ID":"af3d9725-8492-4a3a-b125-a545b2d4c8c7","Type":"ContainerDied","Data":"f45d8913d22c5cf2feaf37342b571d9c407a87e665527de862aef9bbf04201b3"} Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.125273 4961 generic.go:334] "Generic (PLEG): container finished" podID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" containerID="6afc82d7da0f8598cc8dfb344fe854f19f767594aeb257ef05d225777babaac9" exitCode=0 Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.125369 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7wrmp" event={"ID":"1a8bb3a1-6049-4a0e-8d86-3ddd148686b9","Type":"ContainerDied","Data":"6afc82d7da0f8598cc8dfb344fe854f19f767594aeb257ef05d225777babaac9"} Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.133990 4961 generic.go:334] "Generic (PLEG): container finished" podID="eb4981fe-4b26-4626-8c45-ba311dc825d9" containerID="39c08015361db10f9eb9157a42e13952456652ef0a34141c20ec0420471e72b6" exitCode=0 Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.134097 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c5wk7" event={"ID":"eb4981fe-4b26-4626-8c45-ba311dc825d9","Type":"ContainerDied","Data":"39c08015361db10f9eb9157a42e13952456652ef0a34141c20ec0420471e72b6"} Dec 05 17:38:13 crc 
kubenswrapper[4961]: I1205 17:38:13.139865 4961 generic.go:334] "Generic (PLEG): container finished" podID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" containerID="fdada913005f0ed962a8aae89c22d907ee8248e0e740ec990da25b884cdc7b6d" exitCode=0 Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.139907 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kgvwm" event={"ID":"6ae1ad4f-caa6-49b9-9d32-6905088903bf","Type":"ContainerDied","Data":"fdada913005f0ed962a8aae89c22d907ee8248e0e740ec990da25b884cdc7b6d"} Dec 05 17:38:13 crc kubenswrapper[4961]: E1205 17:38:13.153969 4961 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 39c08015361db10f9eb9157a42e13952456652ef0a34141c20ec0420471e72b6 is running failed: container process not found" containerID="39c08015361db10f9eb9157a42e13952456652ef0a34141c20ec0420471e72b6" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 17:38:13 crc kubenswrapper[4961]: E1205 17:38:13.154281 4961 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 39c08015361db10f9eb9157a42e13952456652ef0a34141c20ec0420471e72b6 is running failed: container process not found" containerID="39c08015361db10f9eb9157a42e13952456652ef0a34141c20ec0420471e72b6" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 17:38:13 crc kubenswrapper[4961]: E1205 17:38:13.154544 4961 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 39c08015361db10f9eb9157a42e13952456652ef0a34141c20ec0420471e72b6 is running failed: container process not found" containerID="39c08015361db10f9eb9157a42e13952456652ef0a34141c20ec0420471e72b6" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 17:38:13 crc kubenswrapper[4961]: E1205 17:38:13.154584 4961 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 39c08015361db10f9eb9157a42e13952456652ef0a34141c20ec0420471e72b6 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-c5wk7" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" containerName="registry-server" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.178255 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/bb5a393d-4029-4474-937c-3ddf348254f2-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-kxwjj\" (UID: \"bb5a393d-4029-4474-937c-3ddf348254f2\") " pod="openshift-marketplace/marketplace-operator-79b997595-kxwjj" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.178314 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bb5a393d-4029-4474-937c-3ddf348254f2-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-kxwjj\" (UID: \"bb5a393d-4029-4474-937c-3ddf348254f2\") " pod="openshift-marketplace/marketplace-operator-79b997595-kxwjj" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.178342 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4lsjd\" (UniqueName: \"kubernetes.io/projected/bb5a393d-4029-4474-937c-3ddf348254f2-kube-api-access-4lsjd\") pod 
\"marketplace-operator-79b997595-kxwjj\" (UID: \"bb5a393d-4029-4474-937c-3ddf348254f2\") " pod="openshift-marketplace/marketplace-operator-79b997595-kxwjj" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.181272 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bb5a393d-4029-4474-937c-3ddf348254f2-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-kxwjj\" (UID: \"bb5a393d-4029-4474-937c-3ddf348254f2\") " pod="openshift-marketplace/marketplace-operator-79b997595-kxwjj" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.188817 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/bb5a393d-4029-4474-937c-3ddf348254f2-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-kxwjj\" (UID: \"bb5a393d-4029-4474-937c-3ddf348254f2\") " pod="openshift-marketplace/marketplace-operator-79b997595-kxwjj" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.196641 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4lsjd\" (UniqueName: \"kubernetes.io/projected/bb5a393d-4029-4474-937c-3ddf348254f2-kube-api-access-4lsjd\") pod \"marketplace-operator-79b997595-kxwjj\" (UID: \"bb5a393d-4029-4474-937c-3ddf348254f2\") " pod="openshift-marketplace/marketplace-operator-79b997595-kxwjj" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.562534 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-kxwjj" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.566756 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kgvwm" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.569898 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c5wk7" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.573683 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-c4krp" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.580597 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7wrmp" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.595524 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hdlff" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.602422 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2djc5" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.605402 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-2vgjf" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.627110 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4rgn2" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.685680 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af3d9725-8492-4a3a-b125-a545b2d4c8c7-utilities\") pod \"af3d9725-8492-4a3a-b125-a545b2d4c8c7\" (UID: \"af3d9725-8492-4a3a-b125-a545b2d4c8c7\") " Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.685718 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af3d9725-8492-4a3a-b125-a545b2d4c8c7-catalog-content\") pod \"af3d9725-8492-4a3a-b125-a545b2d4c8c7\" (UID: \"af3d9725-8492-4a3a-b125-a545b2d4c8c7\") " Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.685740 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3733b15a-cd45-418e-9452-79a33535ce35-marketplace-trusted-ca\") pod \"3733b15a-cd45-418e-9452-79a33535ce35\" (UID: \"3733b15a-cd45-418e-9452-79a33535ce35\") " Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.685761 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb4981fe-4b26-4626-8c45-ba311dc825d9-catalog-content\") pod \"eb4981fe-4b26-4626-8c45-ba311dc825d9\" (UID: \"eb4981fe-4b26-4626-8c45-ba311dc825d9\") " Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.685820 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ae1ad4f-caa6-49b9-9d32-6905088903bf-utilities\") pod \"6ae1ad4f-caa6-49b9-9d32-6905088903bf\" (UID: \"6ae1ad4f-caa6-49b9-9d32-6905088903bf\") " Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.685840 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81f0c1f1-bb8e-4ae4-a87b-460ec2f12258-utilities\") pod \"81f0c1f1-bb8e-4ae4-a87b-460ec2f12258\" (UID: \"81f0c1f1-bb8e-4ae4-a87b-460ec2f12258\") " Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.685872 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb4981fe-4b26-4626-8c45-ba311dc825d9-utilities\") pod \"eb4981fe-4b26-4626-8c45-ba311dc825d9\" (UID: \"eb4981fe-4b26-4626-8c45-ba311dc825d9\") " Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.685904 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39220eba-93d5-4e82-89a9-97d1383522a4-utilities\") pod \"39220eba-93d5-4e82-89a9-97d1383522a4\" (UID: \"39220eba-93d5-4e82-89a9-97d1383522a4\") " Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.685927 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3733b15a-cd45-418e-9452-79a33535ce35-marketplace-operator-metrics\") pod \"3733b15a-cd45-418e-9452-79a33535ce35\" (UID: \"3733b15a-cd45-418e-9452-79a33535ce35\") " Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.685953 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39220eba-93d5-4e82-89a9-97d1383522a4-catalog-content\") pod 
\"39220eba-93d5-4e82-89a9-97d1383522a4\" (UID: \"39220eba-93d5-4e82-89a9-97d1383522a4\") " Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.685972 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ab60ade-3b80-4731-9fa7-09c77a0fa666-catalog-content\") pod \"4ab60ade-3b80-4731-9fa7-09c77a0fa666\" (UID: \"4ab60ade-3b80-4731-9fa7-09c77a0fa666\") " Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.685987 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81f0c1f1-bb8e-4ae4-a87b-460ec2f12258-catalog-content\") pod \"81f0c1f1-bb8e-4ae4-a87b-460ec2f12258\" (UID: \"81f0c1f1-bb8e-4ae4-a87b-460ec2f12258\") " Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.686013 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ae1ad4f-caa6-49b9-9d32-6905088903bf-catalog-content\") pod \"6ae1ad4f-caa6-49b9-9d32-6905088903bf\" (UID: \"6ae1ad4f-caa6-49b9-9d32-6905088903bf\") " Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.686035 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8x959\" (UniqueName: \"kubernetes.io/projected/1a8bb3a1-6049-4a0e-8d86-3ddd148686b9-kube-api-access-8x959\") pod \"1a8bb3a1-6049-4a0e-8d86-3ddd148686b9\" (UID: \"1a8bb3a1-6049-4a0e-8d86-3ddd148686b9\") " Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.686068 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jjb9\" (UniqueName: \"kubernetes.io/projected/81f0c1f1-bb8e-4ae4-a87b-460ec2f12258-kube-api-access-7jjb9\") pod \"81f0c1f1-bb8e-4ae4-a87b-460ec2f12258\" (UID: \"81f0c1f1-bb8e-4ae4-a87b-460ec2f12258\") " Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.686103 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ab60ade-3b80-4731-9fa7-09c77a0fa666-utilities\") pod \"4ab60ade-3b80-4731-9fa7-09c77a0fa666\" (UID: \"4ab60ade-3b80-4731-9fa7-09c77a0fa666\") " Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.686129 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jbhk6\" (UniqueName: \"kubernetes.io/projected/3733b15a-cd45-418e-9452-79a33535ce35-kube-api-access-jbhk6\") pod \"3733b15a-cd45-418e-9452-79a33535ce35\" (UID: \"3733b15a-cd45-418e-9452-79a33535ce35\") " Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.686146 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xg48n\" (UniqueName: \"kubernetes.io/projected/39220eba-93d5-4e82-89a9-97d1383522a4-kube-api-access-xg48n\") pod \"39220eba-93d5-4e82-89a9-97d1383522a4\" (UID: \"39220eba-93d5-4e82-89a9-97d1383522a4\") " Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.686179 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a8bb3a1-6049-4a0e-8d86-3ddd148686b9-utilities\") pod \"1a8bb3a1-6049-4a0e-8d86-3ddd148686b9\" (UID: \"1a8bb3a1-6049-4a0e-8d86-3ddd148686b9\") " Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.686200 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/1a8bb3a1-6049-4a0e-8d86-3ddd148686b9-catalog-content\") pod \"1a8bb3a1-6049-4a0e-8d86-3ddd148686b9\" (UID: \"1a8bb3a1-6049-4a0e-8d86-3ddd148686b9\") " Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.686238 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6b877\" (UniqueName: \"kubernetes.io/projected/eb4981fe-4b26-4626-8c45-ba311dc825d9-kube-api-access-6b877\") pod \"eb4981fe-4b26-4626-8c45-ba311dc825d9\" (UID: \"eb4981fe-4b26-4626-8c45-ba311dc825d9\") " Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.686322 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-45prm\" (UniqueName: \"kubernetes.io/projected/af3d9725-8492-4a3a-b125-a545b2d4c8c7-kube-api-access-45prm\") pod \"af3d9725-8492-4a3a-b125-a545b2d4c8c7\" (UID: \"af3d9725-8492-4a3a-b125-a545b2d4c8c7\") " Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.686343 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-z7khz\" (UniqueName: \"kubernetes.io/projected/6ae1ad4f-caa6-49b9-9d32-6905088903bf-kube-api-access-z7khz\") pod \"6ae1ad4f-caa6-49b9-9d32-6905088903bf\" (UID: \"6ae1ad4f-caa6-49b9-9d32-6905088903bf\") " Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.686367 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-crfnl\" (UniqueName: \"kubernetes.io/projected/4ab60ade-3b80-4731-9fa7-09c77a0fa666-kube-api-access-crfnl\") pod \"4ab60ade-3b80-4731-9fa7-09c77a0fa666\" (UID: \"4ab60ade-3b80-4731-9fa7-09c77a0fa666\") " Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.689702 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ae1ad4f-caa6-49b9-9d32-6905088903bf-utilities" (OuterVolumeSpecName: "utilities") pod "6ae1ad4f-caa6-49b9-9d32-6905088903bf" (UID: "6ae1ad4f-caa6-49b9-9d32-6905088903bf"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.690114 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ab60ade-3b80-4731-9fa7-09c77a0fa666-utilities" (OuterVolumeSpecName: "utilities") pod "4ab60ade-3b80-4731-9fa7-09c77a0fa666" (UID: "4ab60ade-3b80-4731-9fa7-09c77a0fa666"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.690873 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb4981fe-4b26-4626-8c45-ba311dc825d9-utilities" (OuterVolumeSpecName: "utilities") pod "eb4981fe-4b26-4626-8c45-ba311dc825d9" (UID: "eb4981fe-4b26-4626-8c45-ba311dc825d9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.691533 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3733b15a-cd45-418e-9452-79a33535ce35-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "3733b15a-cd45-418e-9452-79a33535ce35" (UID: "3733b15a-cd45-418e-9452-79a33535ce35"). InnerVolumeSpecName "marketplace-trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.691595 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39220eba-93d5-4e82-89a9-97d1383522a4-utilities" (OuterVolumeSpecName: "utilities") pod "39220eba-93d5-4e82-89a9-97d1383522a4" (UID: "39220eba-93d5-4e82-89a9-97d1383522a4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.692045 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af3d9725-8492-4a3a-b125-a545b2d4c8c7-utilities" (OuterVolumeSpecName: "utilities") pod "af3d9725-8492-4a3a-b125-a545b2d4c8c7" (UID: "af3d9725-8492-4a3a-b125-a545b2d4c8c7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.694466 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81f0c1f1-bb8e-4ae4-a87b-460ec2f12258-utilities" (OuterVolumeSpecName: "utilities") pod "81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" (UID: "81f0c1f1-bb8e-4ae4-a87b-460ec2f12258"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.696378 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3733b15a-cd45-418e-9452-79a33535ce35-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "3733b15a-cd45-418e-9452-79a33535ce35" (UID: "3733b15a-cd45-418e-9452-79a33535ce35"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.696399 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a8bb3a1-6049-4a0e-8d86-3ddd148686b9-kube-api-access-8x959" (OuterVolumeSpecName: "kube-api-access-8x959") pod "1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" (UID: "1a8bb3a1-6049-4a0e-8d86-3ddd148686b9"). InnerVolumeSpecName "kube-api-access-8x959". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.697180 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a8bb3a1-6049-4a0e-8d86-3ddd148686b9-utilities" (OuterVolumeSpecName: "utilities") pod "1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" (UID: "1a8bb3a1-6049-4a0e-8d86-3ddd148686b9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.700182 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ab60ade-3b80-4731-9fa7-09c77a0fa666-kube-api-access-crfnl" (OuterVolumeSpecName: "kube-api-access-crfnl") pod "4ab60ade-3b80-4731-9fa7-09c77a0fa666" (UID: "4ab60ade-3b80-4731-9fa7-09c77a0fa666"). InnerVolumeSpecName "kube-api-access-crfnl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.700318 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81f0c1f1-bb8e-4ae4-a87b-460ec2f12258-kube-api-access-7jjb9" (OuterVolumeSpecName: "kube-api-access-7jjb9") pod "81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" (UID: "81f0c1f1-bb8e-4ae4-a87b-460ec2f12258"). 
InnerVolumeSpecName "kube-api-access-7jjb9". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.700578 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/af3d9725-8492-4a3a-b125-a545b2d4c8c7-kube-api-access-45prm" (OuterVolumeSpecName: "kube-api-access-45prm") pod "af3d9725-8492-4a3a-b125-a545b2d4c8c7" (UID: "af3d9725-8492-4a3a-b125-a545b2d4c8c7"). InnerVolumeSpecName "kube-api-access-45prm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.702482 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ae1ad4f-caa6-49b9-9d32-6905088903bf-kube-api-access-z7khz" (OuterVolumeSpecName: "kube-api-access-z7khz") pod "6ae1ad4f-caa6-49b9-9d32-6905088903bf" (UID: "6ae1ad4f-caa6-49b9-9d32-6905088903bf"). InnerVolumeSpecName "kube-api-access-z7khz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.707116 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39220eba-93d5-4e82-89a9-97d1383522a4-kube-api-access-xg48n" (OuterVolumeSpecName: "kube-api-access-xg48n") pod "39220eba-93d5-4e82-89a9-97d1383522a4" (UID: "39220eba-93d5-4e82-89a9-97d1383522a4"). InnerVolumeSpecName "kube-api-access-xg48n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.708871 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb4981fe-4b26-4626-8c45-ba311dc825d9-kube-api-access-6b877" (OuterVolumeSpecName: "kube-api-access-6b877") pod "eb4981fe-4b26-4626-8c45-ba311dc825d9" (UID: "eb4981fe-4b26-4626-8c45-ba311dc825d9"). InnerVolumeSpecName "kube-api-access-6b877". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.710915 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3733b15a-cd45-418e-9452-79a33535ce35-kube-api-access-jbhk6" (OuterVolumeSpecName: "kube-api-access-jbhk6") pod "3733b15a-cd45-418e-9452-79a33535ce35" (UID: "3733b15a-cd45-418e-9452-79a33535ce35"). InnerVolumeSpecName "kube-api-access-jbhk6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.749375 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39220eba-93d5-4e82-89a9-97d1383522a4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "39220eba-93d5-4e82-89a9-97d1383522a4" (UID: "39220eba-93d5-4e82-89a9-97d1383522a4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.787709 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6b877\" (UniqueName: \"kubernetes.io/projected/eb4981fe-4b26-4626-8c45-ba311dc825d9-kube-api-access-6b877\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.787738 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-45prm\" (UniqueName: \"kubernetes.io/projected/af3d9725-8492-4a3a-b125-a545b2d4c8c7-kube-api-access-45prm\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.787747 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-z7khz\" (UniqueName: \"kubernetes.io/projected/6ae1ad4f-caa6-49b9-9d32-6905088903bf-kube-api-access-z7khz\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.787756 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-crfnl\" (UniqueName: \"kubernetes.io/projected/4ab60ade-3b80-4731-9fa7-09c77a0fa666-kube-api-access-crfnl\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.787765 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/af3d9725-8492-4a3a-b125-a545b2d4c8c7-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.787798 4961 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/3733b15a-cd45-418e-9452-79a33535ce35-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.787811 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6ae1ad4f-caa6-49b9-9d32-6905088903bf-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.787821 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/81f0c1f1-bb8e-4ae4-a87b-460ec2f12258-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.787830 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb4981fe-4b26-4626-8c45-ba311dc825d9-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.787838 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/39220eba-93d5-4e82-89a9-97d1383522a4-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.787846 4961 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/3733b15a-cd45-418e-9452-79a33535ce35-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.787855 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/39220eba-93d5-4e82-89a9-97d1383522a4-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.787862 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8x959\" (UniqueName: 
\"kubernetes.io/projected/1a8bb3a1-6049-4a0e-8d86-3ddd148686b9-kube-api-access-8x959\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.787870 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jjb9\" (UniqueName: \"kubernetes.io/projected/81f0c1f1-bb8e-4ae4-a87b-460ec2f12258-kube-api-access-7jjb9\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.787878 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ab60ade-3b80-4731-9fa7-09c77a0fa666-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.787886 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jbhk6\" (UniqueName: \"kubernetes.io/projected/3733b15a-cd45-418e-9452-79a33535ce35-kube-api-access-jbhk6\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.787894 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xg48n\" (UniqueName: \"kubernetes.io/projected/39220eba-93d5-4e82-89a9-97d1383522a4-kube-api-access-xg48n\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.787902 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a8bb3a1-6049-4a0e-8d86-3ddd148686b9-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.792963 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb4981fe-4b26-4626-8c45-ba311dc825d9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eb4981fe-4b26-4626-8c45-ba311dc825d9" (UID: "eb4981fe-4b26-4626-8c45-ba311dc825d9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.805909 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/af3d9725-8492-4a3a-b125-a545b2d4c8c7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "af3d9725-8492-4a3a-b125-a545b2d4c8c7" (UID: "af3d9725-8492-4a3a-b125-a545b2d4c8c7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.815528 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a8bb3a1-6049-4a0e-8d86-3ddd148686b9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" (UID: "1a8bb3a1-6049-4a0e-8d86-3ddd148686b9"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.866279 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6ae1ad4f-caa6-49b9-9d32-6905088903bf-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6ae1ad4f-caa6-49b9-9d32-6905088903bf" (UID: "6ae1ad4f-caa6-49b9-9d32-6905088903bf"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.883650 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81f0c1f1-bb8e-4ae4-a87b-460ec2f12258-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" (UID: "81f0c1f1-bb8e-4ae4-a87b-460ec2f12258"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.889231 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6ae1ad4f-caa6-49b9-9d32-6905088903bf-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.889265 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a8bb3a1-6049-4a0e-8d86-3ddd148686b9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.889274 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/af3d9725-8492-4a3a-b125-a545b2d4c8c7-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.889283 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb4981fe-4b26-4626-8c45-ba311dc825d9-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.889293 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/81f0c1f1-bb8e-4ae4-a87b-460ec2f12258-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.904574 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ab60ade-3b80-4731-9fa7-09c77a0fa666-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4ab60ade-3b80-4731-9fa7-09c77a0fa666" (UID: "4ab60ade-3b80-4731-9fa7-09c77a0fa666"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:38:13 crc kubenswrapper[4961]: I1205 17:38:13.990324 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ab60ade-3b80-4731-9fa7-09c77a0fa666-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.031364 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-kxwjj"] Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.145405 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-kxwjj" event={"ID":"bb5a393d-4029-4474-937c-3ddf348254f2","Type":"ContainerStarted","Data":"fefbb8472c1fa1120ff0fc282ec0f5dc65c1c90f00d2b9f4b44caf590e6de264"} Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.148053 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-7wrmp" event={"ID":"1a8bb3a1-6049-4a0e-8d86-3ddd148686b9","Type":"ContainerDied","Data":"c22451a68046fc921894d3c0c4d1adb1ed1f0d6763703b0e032a73354f1f72d5"} Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.148235 4961 scope.go:117] "RemoveContainer" containerID="6afc82d7da0f8598cc8dfb344fe854f19f767594aeb257ef05d225777babaac9" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.148122 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-7wrmp" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.156222 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-c5wk7" event={"ID":"eb4981fe-4b26-4626-8c45-ba311dc825d9","Type":"ContainerDied","Data":"2ce3953ecab7964b89db55b50b1985142774ae0ab54d34ab7f44c143025b113e"} Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.156412 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-c5wk7" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.158585 4961 generic.go:334] "Generic (PLEG): container finished" podID="39220eba-93d5-4e82-89a9-97d1383522a4" containerID="c834e2400f8ee4f4d3dd207d99068eca1f2bddb016239a33514fc3a0b746e3c0" exitCode=0 Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.158652 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2djc5" event={"ID":"39220eba-93d5-4e82-89a9-97d1383522a4","Type":"ContainerDied","Data":"c834e2400f8ee4f4d3dd207d99068eca1f2bddb016239a33514fc3a0b746e3c0"} Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.158683 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2djc5" event={"ID":"39220eba-93d5-4e82-89a9-97d1383522a4","Type":"ContainerDied","Data":"21f64927db412b8795edeaaddcdc37ed23528e4c959b6dd5e04631b321349cc0"} Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.158798 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2djc5" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.161513 4961 generic.go:334] "Generic (PLEG): container finished" podID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" containerID="f330a359f0dcf055cab81f830b377e95cde2afa6dff7c9bd2bdb3267afadceca" exitCode=0 Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.161565 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-2vgjf" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.161605 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2vgjf" event={"ID":"4ab60ade-3b80-4731-9fa7-09c77a0fa666","Type":"ContainerDied","Data":"f330a359f0dcf055cab81f830b377e95cde2afa6dff7c9bd2bdb3267afadceca"} Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.161639 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-2vgjf" event={"ID":"4ab60ade-3b80-4731-9fa7-09c77a0fa666","Type":"ContainerDied","Data":"36c7a194db65c9fac5439ad29418d0095870e4a5df373861cd28a9872b0d4d7b"} Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.164079 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-kgvwm" event={"ID":"6ae1ad4f-caa6-49b9-9d32-6905088903bf","Type":"ContainerDied","Data":"601aab3516033a5634d773845d0d202de78416c4ccfbc100cc0a28bd46368409"} Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.164189 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-kgvwm" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.166884 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-c4krp" event={"ID":"3733b15a-cd45-418e-9452-79a33535ce35","Type":"ContainerDied","Data":"a7498208f6c94fcd820c326d3dd1855869fffbb467db6305014867224a006c58"} Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.167106 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-c4krp" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.173365 4961 scope.go:117] "RemoveContainer" containerID="99da141428358bbd14e7287820d44d32e97a23b15949f57b49553b81dbdc34c0" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.176287 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-hdlff" event={"ID":"af3d9725-8492-4a3a-b125-a545b2d4c8c7","Type":"ContainerDied","Data":"1606c93c4330eead7795a39c908cfd878e68bf8d2d50416613e52e0b56cc71ab"} Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.176753 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-hdlff" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.182623 4961 generic.go:334] "Generic (PLEG): container finished" podID="81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" containerID="b89276b287c0ee766626cc9dd93bf00997a01a5a639953c364d6990df5417f76" exitCode=0 Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.182924 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4rgn2" event={"ID":"81f0c1f1-bb8e-4ae4-a87b-460ec2f12258","Type":"ContainerDied","Data":"b89276b287c0ee766626cc9dd93bf00997a01a5a639953c364d6990df5417f76"} Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.183201 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-4rgn2" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.189294 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-4rgn2" event={"ID":"81f0c1f1-bb8e-4ae4-a87b-460ec2f12258","Type":"ContainerDied","Data":"c1383dc0ded80127b334d1ddf2a0d30f4dc5bb34dce50cad013322abccad284a"} Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.201531 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-7wrmp"] Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.207538 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-7wrmp"] Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.214068 4961 scope.go:117] "RemoveContainer" containerID="84ea574183ec67be2dae60a8be478ef34819c26b18cfb8f50943eba6223caf95" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.220546 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-2vgjf"] Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.229444 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-2vgjf"] Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.235615 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2djc5"] Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.241289 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2djc5"] Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.246494 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-c4krp"] Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.249397 4961 scope.go:117] "RemoveContainer" containerID="39c08015361db10f9eb9157a42e13952456652ef0a34141c20ec0420471e72b6" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.253300 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-c4krp"] Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.259057 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-c5wk7"] Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.265512 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-c5wk7"] Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.269583 4961 scope.go:117] "RemoveContainer" containerID="93a1ebbd31bc59273eba0da9e2415250afdb5c1d0a2e6cfa6046c8af5295d094" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.270389 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-4rgn2"] Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.275161 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-4rgn2"] Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.280059 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-hdlff"] Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.284406 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-hdlff"] Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.285013 4961 scope.go:117] "RemoveContainer" containerID="26109713ebb34eea5e0f7550c9731a7146072fb36351df0eb779f9c7d722f8d0" Dec 05 
17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.290236 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-kgvwm"] Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.294454 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-kgvwm"] Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.298872 4961 scope.go:117] "RemoveContainer" containerID="c834e2400f8ee4f4d3dd207d99068eca1f2bddb016239a33514fc3a0b746e3c0" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.315862 4961 scope.go:117] "RemoveContainer" containerID="7e1b8f2ecc9d2ace40403ca0f859bd27bbc1f24dcdf7861e3b41d59b18fa6b0a" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.334810 4961 scope.go:117] "RemoveContainer" containerID="7fc09f87a3cb995dec370db7788d867747044a618f12d653aaf8777d114942bf" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.349062 4961 scope.go:117] "RemoveContainer" containerID="c834e2400f8ee4f4d3dd207d99068eca1f2bddb016239a33514fc3a0b746e3c0" Dec 05 17:38:14 crc kubenswrapper[4961]: E1205 17:38:14.349513 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c834e2400f8ee4f4d3dd207d99068eca1f2bddb016239a33514fc3a0b746e3c0\": container with ID starting with c834e2400f8ee4f4d3dd207d99068eca1f2bddb016239a33514fc3a0b746e3c0 not found: ID does not exist" containerID="c834e2400f8ee4f4d3dd207d99068eca1f2bddb016239a33514fc3a0b746e3c0" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.349572 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c834e2400f8ee4f4d3dd207d99068eca1f2bddb016239a33514fc3a0b746e3c0"} err="failed to get container status \"c834e2400f8ee4f4d3dd207d99068eca1f2bddb016239a33514fc3a0b746e3c0\": rpc error: code = NotFound desc = could not find container \"c834e2400f8ee4f4d3dd207d99068eca1f2bddb016239a33514fc3a0b746e3c0\": container with ID starting with c834e2400f8ee4f4d3dd207d99068eca1f2bddb016239a33514fc3a0b746e3c0 not found: ID does not exist" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.349607 4961 scope.go:117] "RemoveContainer" containerID="7e1b8f2ecc9d2ace40403ca0f859bd27bbc1f24dcdf7861e3b41d59b18fa6b0a" Dec 05 17:38:14 crc kubenswrapper[4961]: E1205 17:38:14.350077 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7e1b8f2ecc9d2ace40403ca0f859bd27bbc1f24dcdf7861e3b41d59b18fa6b0a\": container with ID starting with 7e1b8f2ecc9d2ace40403ca0f859bd27bbc1f24dcdf7861e3b41d59b18fa6b0a not found: ID does not exist" containerID="7e1b8f2ecc9d2ace40403ca0f859bd27bbc1f24dcdf7861e3b41d59b18fa6b0a" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.350112 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7e1b8f2ecc9d2ace40403ca0f859bd27bbc1f24dcdf7861e3b41d59b18fa6b0a"} err="failed to get container status \"7e1b8f2ecc9d2ace40403ca0f859bd27bbc1f24dcdf7861e3b41d59b18fa6b0a\": rpc error: code = NotFound desc = could not find container \"7e1b8f2ecc9d2ace40403ca0f859bd27bbc1f24dcdf7861e3b41d59b18fa6b0a\": container with ID starting with 7e1b8f2ecc9d2ace40403ca0f859bd27bbc1f24dcdf7861e3b41d59b18fa6b0a not found: ID does not exist" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.350143 4961 scope.go:117] "RemoveContainer" containerID="7fc09f87a3cb995dec370db7788d867747044a618f12d653aaf8777d114942bf" Dec 05 17:38:14 crc 
kubenswrapper[4961]: E1205 17:38:14.350488 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7fc09f87a3cb995dec370db7788d867747044a618f12d653aaf8777d114942bf\": container with ID starting with 7fc09f87a3cb995dec370db7788d867747044a618f12d653aaf8777d114942bf not found: ID does not exist" containerID="7fc09f87a3cb995dec370db7788d867747044a618f12d653aaf8777d114942bf" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.350521 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fc09f87a3cb995dec370db7788d867747044a618f12d653aaf8777d114942bf"} err="failed to get container status \"7fc09f87a3cb995dec370db7788d867747044a618f12d653aaf8777d114942bf\": rpc error: code = NotFound desc = could not find container \"7fc09f87a3cb995dec370db7788d867747044a618f12d653aaf8777d114942bf\": container with ID starting with 7fc09f87a3cb995dec370db7788d867747044a618f12d653aaf8777d114942bf not found: ID does not exist" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.350541 4961 scope.go:117] "RemoveContainer" containerID="f330a359f0dcf055cab81f830b377e95cde2afa6dff7c9bd2bdb3267afadceca" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.366761 4961 scope.go:117] "RemoveContainer" containerID="a1ad30ed9db60fe6e0dfa11f343cfacbdd2dc50c169d83ef664339c7994ef8e2" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.382832 4961 scope.go:117] "RemoveContainer" containerID="d84234a5ed0342027a73742d4682be80852ba2da8d6483ed736c970dd31ae2e3" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.402260 4961 scope.go:117] "RemoveContainer" containerID="f330a359f0dcf055cab81f830b377e95cde2afa6dff7c9bd2bdb3267afadceca" Dec 05 17:38:14 crc kubenswrapper[4961]: E1205 17:38:14.402639 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f330a359f0dcf055cab81f830b377e95cde2afa6dff7c9bd2bdb3267afadceca\": container with ID starting with f330a359f0dcf055cab81f830b377e95cde2afa6dff7c9bd2bdb3267afadceca not found: ID does not exist" containerID="f330a359f0dcf055cab81f830b377e95cde2afa6dff7c9bd2bdb3267afadceca" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.402672 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f330a359f0dcf055cab81f830b377e95cde2afa6dff7c9bd2bdb3267afadceca"} err="failed to get container status \"f330a359f0dcf055cab81f830b377e95cde2afa6dff7c9bd2bdb3267afadceca\": rpc error: code = NotFound desc = could not find container \"f330a359f0dcf055cab81f830b377e95cde2afa6dff7c9bd2bdb3267afadceca\": container with ID starting with f330a359f0dcf055cab81f830b377e95cde2afa6dff7c9bd2bdb3267afadceca not found: ID does not exist" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.402695 4961 scope.go:117] "RemoveContainer" containerID="a1ad30ed9db60fe6e0dfa11f343cfacbdd2dc50c169d83ef664339c7994ef8e2" Dec 05 17:38:14 crc kubenswrapper[4961]: E1205 17:38:14.402927 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1ad30ed9db60fe6e0dfa11f343cfacbdd2dc50c169d83ef664339c7994ef8e2\": container with ID starting with a1ad30ed9db60fe6e0dfa11f343cfacbdd2dc50c169d83ef664339c7994ef8e2 not found: ID does not exist" containerID="a1ad30ed9db60fe6e0dfa11f343cfacbdd2dc50c169d83ef664339c7994ef8e2" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.402948 4961 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1ad30ed9db60fe6e0dfa11f343cfacbdd2dc50c169d83ef664339c7994ef8e2"} err="failed to get container status \"a1ad30ed9db60fe6e0dfa11f343cfacbdd2dc50c169d83ef664339c7994ef8e2\": rpc error: code = NotFound desc = could not find container \"a1ad30ed9db60fe6e0dfa11f343cfacbdd2dc50c169d83ef664339c7994ef8e2\": container with ID starting with a1ad30ed9db60fe6e0dfa11f343cfacbdd2dc50c169d83ef664339c7994ef8e2 not found: ID does not exist" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.402964 4961 scope.go:117] "RemoveContainer" containerID="d84234a5ed0342027a73742d4682be80852ba2da8d6483ed736c970dd31ae2e3" Dec 05 17:38:14 crc kubenswrapper[4961]: E1205 17:38:14.403343 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d84234a5ed0342027a73742d4682be80852ba2da8d6483ed736c970dd31ae2e3\": container with ID starting with d84234a5ed0342027a73742d4682be80852ba2da8d6483ed736c970dd31ae2e3 not found: ID does not exist" containerID="d84234a5ed0342027a73742d4682be80852ba2da8d6483ed736c970dd31ae2e3" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.403364 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d84234a5ed0342027a73742d4682be80852ba2da8d6483ed736c970dd31ae2e3"} err="failed to get container status \"d84234a5ed0342027a73742d4682be80852ba2da8d6483ed736c970dd31ae2e3\": rpc error: code = NotFound desc = could not find container \"d84234a5ed0342027a73742d4682be80852ba2da8d6483ed736c970dd31ae2e3\": container with ID starting with d84234a5ed0342027a73742d4682be80852ba2da8d6483ed736c970dd31ae2e3 not found: ID does not exist" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.403376 4961 scope.go:117] "RemoveContainer" containerID="fdada913005f0ed962a8aae89c22d907ee8248e0e740ec990da25b884cdc7b6d" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.417882 4961 scope.go:117] "RemoveContainer" containerID="57cc74eed4e87ecbf9382404a061301a4db36292cfe05c048c1e834570847d34" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.431005 4961 scope.go:117] "RemoveContainer" containerID="460fed82e92c0840ebc91e497b9f94acc5adcc1b2980d140c8075c6bdb250f52" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.444424 4961 scope.go:117] "RemoveContainer" containerID="3e79ab8c02f5039d8fb9f1d8d54f3135d72ab16ace4041ebc8586eba0e199646" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.457694 4961 scope.go:117] "RemoveContainer" containerID="f45d8913d22c5cf2feaf37342b571d9c407a87e665527de862aef9bbf04201b3" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.471515 4961 scope.go:117] "RemoveContainer" containerID="5b1983c716ff360bda4af8f00baedd8b64452ee0663836fb5e011c31884fa4ae" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.485601 4961 scope.go:117] "RemoveContainer" containerID="52f4767a07d47bd31ad777de4907e268a8be689dd6e8b4302b6986c97dafa48a" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.499910 4961 scope.go:117] "RemoveContainer" containerID="b89276b287c0ee766626cc9dd93bf00997a01a5a639953c364d6990df5417f76" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.511834 4961 scope.go:117] "RemoveContainer" containerID="c1de3f84c56bfe9648aa706574eef6a834c05b7f89b07ac889357628ab122c68" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.530090 4961 scope.go:117] "RemoveContainer" containerID="94c603e837f73e29d968f111faeb443c1c0d5f5f54b467257d8df0e6bdb92e8f" Dec 05 17:38:14 crc 
kubenswrapper[4961]: I1205 17:38:14.544515 4961 scope.go:117] "RemoveContainer" containerID="b89276b287c0ee766626cc9dd93bf00997a01a5a639953c364d6990df5417f76" Dec 05 17:38:14 crc kubenswrapper[4961]: E1205 17:38:14.545059 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b89276b287c0ee766626cc9dd93bf00997a01a5a639953c364d6990df5417f76\": container with ID starting with b89276b287c0ee766626cc9dd93bf00997a01a5a639953c364d6990df5417f76 not found: ID does not exist" containerID="b89276b287c0ee766626cc9dd93bf00997a01a5a639953c364d6990df5417f76" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.545108 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b89276b287c0ee766626cc9dd93bf00997a01a5a639953c364d6990df5417f76"} err="failed to get container status \"b89276b287c0ee766626cc9dd93bf00997a01a5a639953c364d6990df5417f76\": rpc error: code = NotFound desc = could not find container \"b89276b287c0ee766626cc9dd93bf00997a01a5a639953c364d6990df5417f76\": container with ID starting with b89276b287c0ee766626cc9dd93bf00997a01a5a639953c364d6990df5417f76 not found: ID does not exist" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.545144 4961 scope.go:117] "RemoveContainer" containerID="c1de3f84c56bfe9648aa706574eef6a834c05b7f89b07ac889357628ab122c68" Dec 05 17:38:14 crc kubenswrapper[4961]: E1205 17:38:14.545537 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c1de3f84c56bfe9648aa706574eef6a834c05b7f89b07ac889357628ab122c68\": container with ID starting with c1de3f84c56bfe9648aa706574eef6a834c05b7f89b07ac889357628ab122c68 not found: ID does not exist" containerID="c1de3f84c56bfe9648aa706574eef6a834c05b7f89b07ac889357628ab122c68" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.545587 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c1de3f84c56bfe9648aa706574eef6a834c05b7f89b07ac889357628ab122c68"} err="failed to get container status \"c1de3f84c56bfe9648aa706574eef6a834c05b7f89b07ac889357628ab122c68\": rpc error: code = NotFound desc = could not find container \"c1de3f84c56bfe9648aa706574eef6a834c05b7f89b07ac889357628ab122c68\": container with ID starting with c1de3f84c56bfe9648aa706574eef6a834c05b7f89b07ac889357628ab122c68 not found: ID does not exist" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.545611 4961 scope.go:117] "RemoveContainer" containerID="94c603e837f73e29d968f111faeb443c1c0d5f5f54b467257d8df0e6bdb92e8f" Dec 05 17:38:14 crc kubenswrapper[4961]: E1205 17:38:14.546192 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94c603e837f73e29d968f111faeb443c1c0d5f5f54b467257d8df0e6bdb92e8f\": container with ID starting with 94c603e837f73e29d968f111faeb443c1c0d5f5f54b467257d8df0e6bdb92e8f not found: ID does not exist" containerID="94c603e837f73e29d968f111faeb443c1c0d5f5f54b467257d8df0e6bdb92e8f" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.546224 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94c603e837f73e29d968f111faeb443c1c0d5f5f54b467257d8df0e6bdb92e8f"} err="failed to get container status \"94c603e837f73e29d968f111faeb443c1c0d5f5f54b467257d8df0e6bdb92e8f\": rpc error: code = NotFound desc = could not find container 
\"94c603e837f73e29d968f111faeb443c1c0d5f5f54b467257d8df0e6bdb92e8f\": container with ID starting with 94c603e837f73e29d968f111faeb443c1c0d5f5f54b467257d8df0e6bdb92e8f not found: ID does not exist" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.882723 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" path="/var/lib/kubelet/pods/1a8bb3a1-6049-4a0e-8d86-3ddd148686b9/volumes" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.886643 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3733b15a-cd45-418e-9452-79a33535ce35" path="/var/lib/kubelet/pods/3733b15a-cd45-418e-9452-79a33535ce35/volumes" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.887535 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39220eba-93d5-4e82-89a9-97d1383522a4" path="/var/lib/kubelet/pods/39220eba-93d5-4e82-89a9-97d1383522a4/volumes" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.888279 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" path="/var/lib/kubelet/pods/4ab60ade-3b80-4731-9fa7-09c77a0fa666/volumes" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.889606 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" path="/var/lib/kubelet/pods/6ae1ad4f-caa6-49b9-9d32-6905088903bf/volumes" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.890261 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" path="/var/lib/kubelet/pods/81f0c1f1-bb8e-4ae4-a87b-460ec2f12258/volumes" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.891344 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" path="/var/lib/kubelet/pods/af3d9725-8492-4a3a-b125-a545b2d4c8c7/volumes" Dec 05 17:38:14 crc kubenswrapper[4961]: I1205 17:38:14.892092 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" path="/var/lib/kubelet/pods/eb4981fe-4b26-4626-8c45-ba311dc825d9/volumes" Dec 05 17:38:15 crc kubenswrapper[4961]: I1205 17:38:15.189082 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-kxwjj" event={"ID":"bb5a393d-4029-4474-937c-3ddf348254f2","Type":"ContainerStarted","Data":"9b1a13427705a5ec6c78d1e45b3478e775161acfb91b0c3260b449cd17058ac4"} Dec 05 17:38:15 crc kubenswrapper[4961]: I1205 17:38:15.189506 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-kxwjj" Dec 05 17:38:15 crc kubenswrapper[4961]: I1205 17:38:15.192865 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-kxwjj" Dec 05 17:38:15 crc kubenswrapper[4961]: I1205 17:38:15.210838 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-kxwjj" podStartSLOduration=3.210823735 podStartE2EDuration="3.210823735s" podCreationTimestamp="2025-12-05 17:38:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:38:15.208113985 +0000 UTC m=+301.269264458" watchObservedRunningTime="2025-12-05 17:38:15.210823735 +0000 UTC m=+301.271974208" Dec 05 17:38:26 crc 
kubenswrapper[4961]: I1205 17:38:26.898741 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-cc7mk"] Dec 05 17:38:26 crc kubenswrapper[4961]: I1205 17:38:26.899507 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" podUID="20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb" containerName="controller-manager" containerID="cri-o://731d3a956876b7765d75b7f9328e4833245a4f186e07e83856ba148c6148369e" gracePeriod=30 Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.020769 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj"] Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.021000 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" podUID="f9ea79ab-de4c-4165-82d4-84b9d73df5a4" containerName="route-controller-manager" containerID="cri-o://40ac9277f765c64a6048308efb71c5191855576b3396ea888ee9dfd0fd56d1d3" gracePeriod=30 Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.257667 4961 generic.go:334] "Generic (PLEG): container finished" podID="20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb" containerID="731d3a956876b7765d75b7f9328e4833245a4f186e07e83856ba148c6148369e" exitCode=0 Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.257761 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" event={"ID":"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb","Type":"ContainerDied","Data":"731d3a956876b7765d75b7f9328e4833245a4f186e07e83856ba148c6148369e"} Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.258071 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" event={"ID":"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb","Type":"ContainerDied","Data":"45f9678143c63a7a9d069817e4725d8d15bd7e4e8aefd2842d376edb801836ec"} Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.258088 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="45f9678143c63a7a9d069817e4725d8d15bd7e4e8aefd2842d376edb801836ec" Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.269535 4961 generic.go:334] "Generic (PLEG): container finished" podID="f9ea79ab-de4c-4165-82d4-84b9d73df5a4" containerID="40ac9277f765c64a6048308efb71c5191855576b3396ea888ee9dfd0fd56d1d3" exitCode=0 Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.269577 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" event={"ID":"f9ea79ab-de4c-4165-82d4-84b9d73df5a4","Type":"ContainerDied","Data":"40ac9277f765c64a6048308efb71c5191855576b3396ea888ee9dfd0fd56d1d3"} Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.275813 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.381070 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.398036 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-serving-cert\") pod \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.398094 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-proxy-ca-bundles\") pod \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.398164 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-config\") pod \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.398192 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-client-ca\") pod \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.398259 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcpz7\" (UniqueName: \"kubernetes.io/projected/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-kube-api-access-xcpz7\") pod \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\" (UID: \"20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb\") " Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.399002 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-client-ca" (OuterVolumeSpecName: "client-ca") pod "20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb" (UID: "20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.399109 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-config" (OuterVolumeSpecName: "config") pod "20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb" (UID: "20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.399439 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb" (UID: "20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.405619 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb" (UID: "20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.413750 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-kube-api-access-xcpz7" (OuterVolumeSpecName: "kube-api-access-xcpz7") pod "20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb" (UID: "20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb"). InnerVolumeSpecName "kube-api-access-xcpz7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.499259 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-client-ca\") pod \"f9ea79ab-de4c-4165-82d4-84b9d73df5a4\" (UID: \"f9ea79ab-de4c-4165-82d4-84b9d73df5a4\") " Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.500010 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-client-ca" (OuterVolumeSpecName: "client-ca") pod "f9ea79ab-de4c-4165-82d4-84b9d73df5a4" (UID: "f9ea79ab-de4c-4165-82d4-84b9d73df5a4"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.500125 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-config\") pod \"f9ea79ab-de4c-4165-82d4-84b9d73df5a4\" (UID: \"f9ea79ab-de4c-4165-82d4-84b9d73df5a4\") " Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.500236 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-serving-cert\") pod \"f9ea79ab-de4c-4165-82d4-84b9d73df5a4\" (UID: \"f9ea79ab-de4c-4165-82d4-84b9d73df5a4\") " Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.500270 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vbxd4\" (UniqueName: \"kubernetes.io/projected/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-kube-api-access-vbxd4\") pod \"f9ea79ab-de4c-4165-82d4-84b9d73df5a4\" (UID: \"f9ea79ab-de4c-4165-82d4-84b9d73df5a4\") " Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.500552 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-config" (OuterVolumeSpecName: "config") pod "f9ea79ab-de4c-4165-82d4-84b9d73df5a4" (UID: "f9ea79ab-de4c-4165-82d4-84b9d73df5a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.500643 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcpz7\" (UniqueName: \"kubernetes.io/projected/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-kube-api-access-xcpz7\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.500659 4961 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.500673 4961 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.500684 4961 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.500694 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.500703 4961 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb-client-ca\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.500713 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.503925 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "f9ea79ab-de4c-4165-82d4-84b9d73df5a4" (UID: "f9ea79ab-de4c-4165-82d4-84b9d73df5a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.506852 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-kube-api-access-vbxd4" (OuterVolumeSpecName: "kube-api-access-vbxd4") pod "f9ea79ab-de4c-4165-82d4-84b9d73df5a4" (UID: "f9ea79ab-de4c-4165-82d4-84b9d73df5a4"). InnerVolumeSpecName "kube-api-access-vbxd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.601982 4961 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-serving-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:27 crc kubenswrapper[4961]: I1205 17:38:27.602029 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vbxd4\" (UniqueName: \"kubernetes.io/projected/f9ea79ab-de4c-4165-82d4-84b9d73df5a4-kube-api-access-vbxd4\") on node \"crc\" DevicePath \"\"" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.275629 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-cc7mk" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.275685 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" event={"ID":"f9ea79ab-de4c-4165-82d4-84b9d73df5a4","Type":"ContainerDied","Data":"86c1713abf615befc67e6621669df2ed90a8f686f756967a28ddd16f8a849e71"} Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.275808 4961 scope.go:117] "RemoveContainer" containerID="40ac9277f765c64a6048308efb71c5191855576b3396ea888ee9dfd0fd56d1d3" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.275632 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.300751 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj"] Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.304540 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-hvmkj"] Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.311991 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-cc7mk"] Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.315104 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-cc7mk"] Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.871150 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb" path="/var/lib/kubelet/pods/20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb/volumes" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.872910 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f9ea79ab-de4c-4165-82d4-84b9d73df5a4" path="/var/lib/kubelet/pods/f9ea79ab-de4c-4165-82d4-84b9d73df5a4/volumes" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.982947 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n"] Dec 05 17:38:28 crc kubenswrapper[4961]: E1205 17:38:28.983496 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" containerName="extract-utilities" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.983525 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" containerName="extract-utilities" Dec 05 17:38:28 crc kubenswrapper[4961]: E1205 17:38:28.983539 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" containerName="extract-utilities" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.983547 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" containerName="extract-utilities" Dec 05 17:38:28 crc kubenswrapper[4961]: E1205 17:38:28.983556 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" containerName="registry-server" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.983566 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" containerName="registry-server" Dec 05 17:38:28 crc 
kubenswrapper[4961]: E1205 17:38:28.983576 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" containerName="extract-content" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.983584 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" containerName="extract-content" Dec 05 17:38:28 crc kubenswrapper[4961]: E1205 17:38:28.983597 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f9ea79ab-de4c-4165-82d4-84b9d73df5a4" containerName="route-controller-manager" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.983604 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="f9ea79ab-de4c-4165-82d4-84b9d73df5a4" containerName="route-controller-manager" Dec 05 17:38:28 crc kubenswrapper[4961]: E1205 17:38:28.983615 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" containerName="registry-server" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.983624 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" containerName="registry-server" Dec 05 17:38:28 crc kubenswrapper[4961]: E1205 17:38:28.983635 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" containerName="extract-utilities" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.983643 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" containerName="extract-utilities" Dec 05 17:38:28 crc kubenswrapper[4961]: E1205 17:38:28.983653 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39220eba-93d5-4e82-89a9-97d1383522a4" containerName="registry-server" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.983661 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="39220eba-93d5-4e82-89a9-97d1383522a4" containerName="registry-server" Dec 05 17:38:28 crc kubenswrapper[4961]: E1205 17:38:28.983672 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" containerName="extract-utilities" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.983680 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" containerName="extract-utilities" Dec 05 17:38:28 crc kubenswrapper[4961]: E1205 17:38:28.983690 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" containerName="extract-content" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.983697 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" containerName="extract-content" Dec 05 17:38:28 crc kubenswrapper[4961]: E1205 17:38:28.983710 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" containerName="registry-server" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.983717 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" containerName="registry-server" Dec 05 17:38:28 crc kubenswrapper[4961]: E1205 17:38:28.983728 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" containerName="registry-server" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.983735 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" 
containerName="registry-server" Dec 05 17:38:28 crc kubenswrapper[4961]: E1205 17:38:28.983747 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" containerName="extract-content" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.983754 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" containerName="extract-content" Dec 05 17:38:28 crc kubenswrapper[4961]: E1205 17:38:28.983763 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" containerName="registry-server" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.983771 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" containerName="registry-server" Dec 05 17:38:28 crc kubenswrapper[4961]: E1205 17:38:28.983801 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" containerName="extract-content" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.983810 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" containerName="extract-content" Dec 05 17:38:28 crc kubenswrapper[4961]: E1205 17:38:28.983819 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" containerName="registry-server" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.983828 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" containerName="registry-server" Dec 05 17:38:28 crc kubenswrapper[4961]: E1205 17:38:28.983839 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb" containerName="controller-manager" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.983847 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb" containerName="controller-manager" Dec 05 17:38:28 crc kubenswrapper[4961]: E1205 17:38:28.983856 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" containerName="extract-content" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.983862 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" containerName="extract-content" Dec 05 17:38:28 crc kubenswrapper[4961]: E1205 17:38:28.983870 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3733b15a-cd45-418e-9452-79a33535ce35" containerName="marketplace-operator" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.983875 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="3733b15a-cd45-418e-9452-79a33535ce35" containerName="marketplace-operator" Dec 05 17:38:28 crc kubenswrapper[4961]: E1205 17:38:28.983883 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39220eba-93d5-4e82-89a9-97d1383522a4" containerName="extract-content" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.983888 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="39220eba-93d5-4e82-89a9-97d1383522a4" containerName="extract-content" Dec 05 17:38:28 crc kubenswrapper[4961]: E1205 17:38:28.983895 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" containerName="extract-content" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.983900 4961 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" containerName="extract-content" Dec 05 17:38:28 crc kubenswrapper[4961]: E1205 17:38:28.983908 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" containerName="extract-utilities" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.983913 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" containerName="extract-utilities" Dec 05 17:38:28 crc kubenswrapper[4961]: E1205 17:38:28.983920 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39220eba-93d5-4e82-89a9-97d1383522a4" containerName="extract-utilities" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.983929 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="39220eba-93d5-4e82-89a9-97d1383522a4" containerName="extract-utilities" Dec 05 17:38:28 crc kubenswrapper[4961]: E1205 17:38:28.983938 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" containerName="extract-utilities" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.983944 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" containerName="extract-utilities" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.984039 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="6ae1ad4f-caa6-49b9-9d32-6905088903bf" containerName="registry-server" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.984052 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="81f0c1f1-bb8e-4ae4-a87b-460ec2f12258" containerName="registry-server" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.984059 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a8bb3a1-6049-4a0e-8d86-3ddd148686b9" containerName="registry-server" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.984067 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="20e84a3e-0a8d-4bc6-9554-1fd6cf45a7bb" containerName="controller-manager" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.984075 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="f9ea79ab-de4c-4165-82d4-84b9d73df5a4" containerName="route-controller-manager" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.984083 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="39220eba-93d5-4e82-89a9-97d1383522a4" containerName="registry-server" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.984090 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ab60ade-3b80-4731-9fa7-09c77a0fa666" containerName="registry-server" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.984096 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb4981fe-4b26-4626-8c45-ba311dc825d9" containerName="registry-server" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.984104 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="3733b15a-cd45-418e-9452-79a33535ce35" containerName="marketplace-operator" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.984112 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="af3d9725-8492-4a3a-b125-a545b2d4c8c7" containerName="registry-server" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.984569 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.987274 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.988303 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.988458 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.988645 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-6b977fcf98-dgzx4"] Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.989662 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6b977fcf98-dgzx4" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.990623 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.990907 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.991549 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.993968 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.994270 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.994631 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.994759 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.994939 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.996665 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n"] Dec 05 17:38:28 crc kubenswrapper[4961]: I1205 17:38:28.996999 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.001649 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.008948 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6b977fcf98-dgzx4"] Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.129730 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/e8391289-6ddb-4b2e-bd78-89e34a0da534-serving-cert\") pod \"route-controller-manager-745f64c84c-qvc5n\" (UID: \"e8391289-6ddb-4b2e-bd78-89e34a0da534\") " pod="openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.129825 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce7961f1-66be-4f10-8271-44d91d88a031-config\") pod \"controller-manager-6b977fcf98-dgzx4\" (UID: \"ce7961f1-66be-4f10-8271-44d91d88a031\") " pod="openshift-controller-manager/controller-manager-6b977fcf98-dgzx4" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.129966 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ce7961f1-66be-4f10-8271-44d91d88a031-serving-cert\") pod \"controller-manager-6b977fcf98-dgzx4\" (UID: \"ce7961f1-66be-4f10-8271-44d91d88a031\") " pod="openshift-controller-manager/controller-manager-6b977fcf98-dgzx4" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.130017 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ce7961f1-66be-4f10-8271-44d91d88a031-proxy-ca-bundles\") pod \"controller-manager-6b977fcf98-dgzx4\" (UID: \"ce7961f1-66be-4f10-8271-44d91d88a031\") " pod="openshift-controller-manager/controller-manager-6b977fcf98-dgzx4" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.130145 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e8391289-6ddb-4b2e-bd78-89e34a0da534-client-ca\") pod \"route-controller-manager-745f64c84c-qvc5n\" (UID: \"e8391289-6ddb-4b2e-bd78-89e34a0da534\") " pod="openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.130263 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8391289-6ddb-4b2e-bd78-89e34a0da534-config\") pod \"route-controller-manager-745f64c84c-qvc5n\" (UID: \"e8391289-6ddb-4b2e-bd78-89e34a0da534\") " pod="openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.130301 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcgrp\" (UniqueName: \"kubernetes.io/projected/ce7961f1-66be-4f10-8271-44d91d88a031-kube-api-access-tcgrp\") pod \"controller-manager-6b977fcf98-dgzx4\" (UID: \"ce7961f1-66be-4f10-8271-44d91d88a031\") " pod="openshift-controller-manager/controller-manager-6b977fcf98-dgzx4" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.130329 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r6z7x\" (UniqueName: \"kubernetes.io/projected/e8391289-6ddb-4b2e-bd78-89e34a0da534-kube-api-access-r6z7x\") pod \"route-controller-manager-745f64c84c-qvc5n\" (UID: \"e8391289-6ddb-4b2e-bd78-89e34a0da534\") " pod="openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.130399 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"client-ca\" (UniqueName: \"kubernetes.io/configmap/ce7961f1-66be-4f10-8271-44d91d88a031-client-ca\") pod \"controller-manager-6b977fcf98-dgzx4\" (UID: \"ce7961f1-66be-4f10-8271-44d91d88a031\") " pod="openshift-controller-manager/controller-manager-6b977fcf98-dgzx4" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.232007 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8391289-6ddb-4b2e-bd78-89e34a0da534-config\") pod \"route-controller-manager-745f64c84c-qvc5n\" (UID: \"e8391289-6ddb-4b2e-bd78-89e34a0da534\") " pod="openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.232066 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcgrp\" (UniqueName: \"kubernetes.io/projected/ce7961f1-66be-4f10-8271-44d91d88a031-kube-api-access-tcgrp\") pod \"controller-manager-6b977fcf98-dgzx4\" (UID: \"ce7961f1-66be-4f10-8271-44d91d88a031\") " pod="openshift-controller-manager/controller-manager-6b977fcf98-dgzx4" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.232096 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r6z7x\" (UniqueName: \"kubernetes.io/projected/e8391289-6ddb-4b2e-bd78-89e34a0da534-kube-api-access-r6z7x\") pod \"route-controller-manager-745f64c84c-qvc5n\" (UID: \"e8391289-6ddb-4b2e-bd78-89e34a0da534\") " pod="openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.232123 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ce7961f1-66be-4f10-8271-44d91d88a031-client-ca\") pod \"controller-manager-6b977fcf98-dgzx4\" (UID: \"ce7961f1-66be-4f10-8271-44d91d88a031\") " pod="openshift-controller-manager/controller-manager-6b977fcf98-dgzx4" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.232162 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e8391289-6ddb-4b2e-bd78-89e34a0da534-serving-cert\") pod \"route-controller-manager-745f64c84c-qvc5n\" (UID: \"e8391289-6ddb-4b2e-bd78-89e34a0da534\") " pod="openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.232186 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce7961f1-66be-4f10-8271-44d91d88a031-config\") pod \"controller-manager-6b977fcf98-dgzx4\" (UID: \"ce7961f1-66be-4f10-8271-44d91d88a031\") " pod="openshift-controller-manager/controller-manager-6b977fcf98-dgzx4" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.232936 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ce7961f1-66be-4f10-8271-44d91d88a031-proxy-ca-bundles\") pod \"controller-manager-6b977fcf98-dgzx4\" (UID: \"ce7961f1-66be-4f10-8271-44d91d88a031\") " pod="openshift-controller-manager/controller-manager-6b977fcf98-dgzx4" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.233027 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ce7961f1-66be-4f10-8271-44d91d88a031-serving-cert\") pod \"controller-manager-6b977fcf98-dgzx4\" 
(UID: \"ce7961f1-66be-4f10-8271-44d91d88a031\") " pod="openshift-controller-manager/controller-manager-6b977fcf98-dgzx4" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.233121 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e8391289-6ddb-4b2e-bd78-89e34a0da534-client-ca\") pod \"route-controller-manager-745f64c84c-qvc5n\" (UID: \"e8391289-6ddb-4b2e-bd78-89e34a0da534\") " pod="openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.233374 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8391289-6ddb-4b2e-bd78-89e34a0da534-config\") pod \"route-controller-manager-745f64c84c-qvc5n\" (UID: \"e8391289-6ddb-4b2e-bd78-89e34a0da534\") " pod="openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.233730 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ce7961f1-66be-4f10-8271-44d91d88a031-client-ca\") pod \"controller-manager-6b977fcf98-dgzx4\" (UID: \"ce7961f1-66be-4f10-8271-44d91d88a031\") " pod="openshift-controller-manager/controller-manager-6b977fcf98-dgzx4" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.234128 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e8391289-6ddb-4b2e-bd78-89e34a0da534-client-ca\") pod \"route-controller-manager-745f64c84c-qvc5n\" (UID: \"e8391289-6ddb-4b2e-bd78-89e34a0da534\") " pod="openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.234641 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/ce7961f1-66be-4f10-8271-44d91d88a031-proxy-ca-bundles\") pod \"controller-manager-6b977fcf98-dgzx4\" (UID: \"ce7961f1-66be-4f10-8271-44d91d88a031\") " pod="openshift-controller-manager/controller-manager-6b977fcf98-dgzx4" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.236036 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ce7961f1-66be-4f10-8271-44d91d88a031-config\") pod \"controller-manager-6b977fcf98-dgzx4\" (UID: \"ce7961f1-66be-4f10-8271-44d91d88a031\") " pod="openshift-controller-manager/controller-manager-6b977fcf98-dgzx4" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.237684 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ce7961f1-66be-4f10-8271-44d91d88a031-serving-cert\") pod \"controller-manager-6b977fcf98-dgzx4\" (UID: \"ce7961f1-66be-4f10-8271-44d91d88a031\") " pod="openshift-controller-manager/controller-manager-6b977fcf98-dgzx4" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.240943 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e8391289-6ddb-4b2e-bd78-89e34a0da534-serving-cert\") pod \"route-controller-manager-745f64c84c-qvc5n\" (UID: \"e8391289-6ddb-4b2e-bd78-89e34a0da534\") " pod="openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.247977 4961 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-r6z7x\" (UniqueName: \"kubernetes.io/projected/e8391289-6ddb-4b2e-bd78-89e34a0da534-kube-api-access-r6z7x\") pod \"route-controller-manager-745f64c84c-qvc5n\" (UID: \"e8391289-6ddb-4b2e-bd78-89e34a0da534\") " pod="openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.248399 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcgrp\" (UniqueName: \"kubernetes.io/projected/ce7961f1-66be-4f10-8271-44d91d88a031-kube-api-access-tcgrp\") pod \"controller-manager-6b977fcf98-dgzx4\" (UID: \"ce7961f1-66be-4f10-8271-44d91d88a031\") " pod="openshift-controller-manager/controller-manager-6b977fcf98-dgzx4" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.310940 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.339250 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-6b977fcf98-dgzx4" Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.573388 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n"] Dec 05 17:38:29 crc kubenswrapper[4961]: I1205 17:38:29.829180 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-6b977fcf98-dgzx4"] Dec 05 17:38:29 crc kubenswrapper[4961]: W1205 17:38:29.833146 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podce7961f1_66be_4f10_8271_44d91d88a031.slice/crio-e6e7d9cd10ee99a854945cd2e06106d6bfaa95ef0187c03070b0e3c516ee4598 WatchSource:0}: Error finding container e6e7d9cd10ee99a854945cd2e06106d6bfaa95ef0187c03070b0e3c516ee4598: Status 404 returned error can't find the container with id e6e7d9cd10ee99a854945cd2e06106d6bfaa95ef0187c03070b0e3c516ee4598 Dec 05 17:38:30 crc kubenswrapper[4961]: I1205 17:38:30.289841 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n" event={"ID":"e8391289-6ddb-4b2e-bd78-89e34a0da534","Type":"ContainerStarted","Data":"9a0b22b7a813b7e828cfe06c0e2b196fe44422762b49ed96c083cb15bd1a792e"} Dec 05 17:38:30 crc kubenswrapper[4961]: I1205 17:38:30.290227 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n" event={"ID":"e8391289-6ddb-4b2e-bd78-89e34a0da534","Type":"ContainerStarted","Data":"4185af246d6c3f812e68b39b0b3f0f4143425a37eb9ce72c1dfd05bbd1f494cb"} Dec 05 17:38:30 crc kubenswrapper[4961]: I1205 17:38:30.290276 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n" Dec 05 17:38:30 crc kubenswrapper[4961]: I1205 17:38:30.291372 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-6b977fcf98-dgzx4" event={"ID":"ce7961f1-66be-4f10-8271-44d91d88a031","Type":"ContainerStarted","Data":"646f7d46bf044ced59b019e1556a1d31938c4aa010c748fb6e21d4388179a136"} Dec 05 17:38:30 crc kubenswrapper[4961]: I1205 17:38:30.291483 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-controller-manager/controller-manager-6b977fcf98-dgzx4" event={"ID":"ce7961f1-66be-4f10-8271-44d91d88a031","Type":"ContainerStarted","Data":"e6e7d9cd10ee99a854945cd2e06106d6bfaa95ef0187c03070b0e3c516ee4598"} Dec 05 17:38:30 crc kubenswrapper[4961]: I1205 17:38:30.291513 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-6b977fcf98-dgzx4" Dec 05 17:38:30 crc kubenswrapper[4961]: I1205 17:38:30.296130 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n" Dec 05 17:38:30 crc kubenswrapper[4961]: I1205 17:38:30.297355 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-6b977fcf98-dgzx4" Dec 05 17:38:30 crc kubenswrapper[4961]: I1205 17:38:30.311581 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n" podStartSLOduration=3.311561307 podStartE2EDuration="3.311561307s" podCreationTimestamp="2025-12-05 17:38:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:38:30.309265018 +0000 UTC m=+316.370415481" watchObservedRunningTime="2025-12-05 17:38:30.311561307 +0000 UTC m=+316.372711780" Dec 05 17:38:30 crc kubenswrapper[4961]: I1205 17:38:30.347302 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-6b977fcf98-dgzx4" podStartSLOduration=3.347281465 podStartE2EDuration="3.347281465s" podCreationTimestamp="2025-12-05 17:38:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:38:30.344688389 +0000 UTC m=+316.405838862" watchObservedRunningTime="2025-12-05 17:38:30.347281465 +0000 UTC m=+316.408431958" Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.485137 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-crqgk"] Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.486727 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-crqgk" Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.489855 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.506844 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-crqgk"] Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.635040 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5186f207-07d2-4325-be3d-3e21a3b5de5e-catalog-content\") pod \"community-operators-crqgk\" (UID: \"5186f207-07d2-4325-be3d-3e21a3b5de5e\") " pod="openshift-marketplace/community-operators-crqgk" Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.635095 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8d7fz\" (UniqueName: \"kubernetes.io/projected/5186f207-07d2-4325-be3d-3e21a3b5de5e-kube-api-access-8d7fz\") pod \"community-operators-crqgk\" (UID: \"5186f207-07d2-4325-be3d-3e21a3b5de5e\") " pod="openshift-marketplace/community-operators-crqgk" Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.635277 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5186f207-07d2-4325-be3d-3e21a3b5de5e-utilities\") pod \"community-operators-crqgk\" (UID: \"5186f207-07d2-4325-be3d-3e21a3b5de5e\") " pod="openshift-marketplace/community-operators-crqgk" Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.684385 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-x247h"] Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.686880 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-x247h" Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.688730 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.694491 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x247h"] Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.736112 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8d7fz\" (UniqueName: \"kubernetes.io/projected/5186f207-07d2-4325-be3d-3e21a3b5de5e-kube-api-access-8d7fz\") pod \"community-operators-crqgk\" (UID: \"5186f207-07d2-4325-be3d-3e21a3b5de5e\") " pod="openshift-marketplace/community-operators-crqgk" Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.736206 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5186f207-07d2-4325-be3d-3e21a3b5de5e-utilities\") pod \"community-operators-crqgk\" (UID: \"5186f207-07d2-4325-be3d-3e21a3b5de5e\") " pod="openshift-marketplace/community-operators-crqgk" Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.736277 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5186f207-07d2-4325-be3d-3e21a3b5de5e-catalog-content\") pod \"community-operators-crqgk\" (UID: \"5186f207-07d2-4325-be3d-3e21a3b5de5e\") " pod="openshift-marketplace/community-operators-crqgk" Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.736697 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5186f207-07d2-4325-be3d-3e21a3b5de5e-utilities\") pod \"community-operators-crqgk\" (UID: \"5186f207-07d2-4325-be3d-3e21a3b5de5e\") " pod="openshift-marketplace/community-operators-crqgk" Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.736767 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5186f207-07d2-4325-be3d-3e21a3b5de5e-catalog-content\") pod \"community-operators-crqgk\" (UID: \"5186f207-07d2-4325-be3d-3e21a3b5de5e\") " pod="openshift-marketplace/community-operators-crqgk" Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.763238 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8d7fz\" (UniqueName: \"kubernetes.io/projected/5186f207-07d2-4325-be3d-3e21a3b5de5e-kube-api-access-8d7fz\") pod \"community-operators-crqgk\" (UID: \"5186f207-07d2-4325-be3d-3e21a3b5de5e\") " pod="openshift-marketplace/community-operators-crqgk" Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.806747 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-crqgk" Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.836938 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-55v5v\" (UniqueName: \"kubernetes.io/projected/9f2bf186-07e1-4212-a88b-377cb9dcc1e4-kube-api-access-55v5v\") pod \"certified-operators-x247h\" (UID: \"9f2bf186-07e1-4212-a88b-377cb9dcc1e4\") " pod="openshift-marketplace/certified-operators-x247h" Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.837437 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f2bf186-07e1-4212-a88b-377cb9dcc1e4-utilities\") pod \"certified-operators-x247h\" (UID: \"9f2bf186-07e1-4212-a88b-377cb9dcc1e4\") " pod="openshift-marketplace/certified-operators-x247h" Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.837542 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f2bf186-07e1-4212-a88b-377cb9dcc1e4-catalog-content\") pod \"certified-operators-x247h\" (UID: \"9f2bf186-07e1-4212-a88b-377cb9dcc1e4\") " pod="openshift-marketplace/certified-operators-x247h" Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.939513 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-55v5v\" (UniqueName: \"kubernetes.io/projected/9f2bf186-07e1-4212-a88b-377cb9dcc1e4-kube-api-access-55v5v\") pod \"certified-operators-x247h\" (UID: \"9f2bf186-07e1-4212-a88b-377cb9dcc1e4\") " pod="openshift-marketplace/certified-operators-x247h" Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.939565 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f2bf186-07e1-4212-a88b-377cb9dcc1e4-utilities\") pod \"certified-operators-x247h\" (UID: \"9f2bf186-07e1-4212-a88b-377cb9dcc1e4\") " pod="openshift-marketplace/certified-operators-x247h" Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.939587 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f2bf186-07e1-4212-a88b-377cb9dcc1e4-catalog-content\") pod \"certified-operators-x247h\" (UID: \"9f2bf186-07e1-4212-a88b-377cb9dcc1e4\") " pod="openshift-marketplace/certified-operators-x247h" Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.940301 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9f2bf186-07e1-4212-a88b-377cb9dcc1e4-catalog-content\") pod \"certified-operators-x247h\" (UID: \"9f2bf186-07e1-4212-a88b-377cb9dcc1e4\") " pod="openshift-marketplace/certified-operators-x247h" Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.940386 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9f2bf186-07e1-4212-a88b-377cb9dcc1e4-utilities\") pod \"certified-operators-x247h\" (UID: \"9f2bf186-07e1-4212-a88b-377cb9dcc1e4\") " pod="openshift-marketplace/certified-operators-x247h" Dec 05 17:38:44 crc kubenswrapper[4961]: I1205 17:38:44.957594 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-55v5v\" (UniqueName: \"kubernetes.io/projected/9f2bf186-07e1-4212-a88b-377cb9dcc1e4-kube-api-access-55v5v\") pod 
\"certified-operators-x247h\" (UID: \"9f2bf186-07e1-4212-a88b-377cb9dcc1e4\") " pod="openshift-marketplace/certified-operators-x247h" Dec 05 17:38:45 crc kubenswrapper[4961]: I1205 17:38:45.011717 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x247h" Dec 05 17:38:45 crc kubenswrapper[4961]: I1205 17:38:45.263859 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-crqgk"] Dec 05 17:38:45 crc kubenswrapper[4961]: W1205 17:38:45.274068 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5186f207_07d2_4325_be3d_3e21a3b5de5e.slice/crio-b77c2d822d76a92e8b1079ecb7cd8375f3826c1d0ad083590779c6a991e5f99f WatchSource:0}: Error finding container b77c2d822d76a92e8b1079ecb7cd8375f3826c1d0ad083590779c6a991e5f99f: Status 404 returned error can't find the container with id b77c2d822d76a92e8b1079ecb7cd8375f3826c1d0ad083590779c6a991e5f99f Dec 05 17:38:45 crc kubenswrapper[4961]: I1205 17:38:45.365965 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-crqgk" event={"ID":"5186f207-07d2-4325-be3d-3e21a3b5de5e","Type":"ContainerStarted","Data":"b77c2d822d76a92e8b1079ecb7cd8375f3826c1d0ad083590779c6a991e5f99f"} Dec 05 17:38:45 crc kubenswrapper[4961]: I1205 17:38:45.470233 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x247h"] Dec 05 17:38:45 crc kubenswrapper[4961]: W1205 17:38:45.476121 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f2bf186_07e1_4212_a88b_377cb9dcc1e4.slice/crio-de4e87b1e577ca644083b9a3ccd52c65c646a1d2458fd7b18f14620999784325 WatchSource:0}: Error finding container de4e87b1e577ca644083b9a3ccd52c65c646a1d2458fd7b18f14620999784325: Status 404 returned error can't find the container with id de4e87b1e577ca644083b9a3ccd52c65c646a1d2458fd7b18f14620999784325 Dec 05 17:38:46 crc kubenswrapper[4961]: I1205 17:38:46.372045 4961 generic.go:334] "Generic (PLEG): container finished" podID="9f2bf186-07e1-4212-a88b-377cb9dcc1e4" containerID="50e10e0fad041c76465c4e44d3861f68f3545dbcf8e8568bed23b5aeaa8f7ebe" exitCode=0 Dec 05 17:38:46 crc kubenswrapper[4961]: I1205 17:38:46.372146 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x247h" event={"ID":"9f2bf186-07e1-4212-a88b-377cb9dcc1e4","Type":"ContainerDied","Data":"50e10e0fad041c76465c4e44d3861f68f3545dbcf8e8568bed23b5aeaa8f7ebe"} Dec 05 17:38:46 crc kubenswrapper[4961]: I1205 17:38:46.372457 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x247h" event={"ID":"9f2bf186-07e1-4212-a88b-377cb9dcc1e4","Type":"ContainerStarted","Data":"de4e87b1e577ca644083b9a3ccd52c65c646a1d2458fd7b18f14620999784325"} Dec 05 17:38:46 crc kubenswrapper[4961]: I1205 17:38:46.375867 4961 generic.go:334] "Generic (PLEG): container finished" podID="5186f207-07d2-4325-be3d-3e21a3b5de5e" containerID="6c839e9c03d90d0d811aa453985a1cae2f7da682332a3b47c059f84c51cb2927" exitCode=0 Dec 05 17:38:46 crc kubenswrapper[4961]: I1205 17:38:46.375919 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-crqgk" event={"ID":"5186f207-07d2-4325-be3d-3e21a3b5de5e","Type":"ContainerDied","Data":"6c839e9c03d90d0d811aa453985a1cae2f7da682332a3b47c059f84c51cb2927"} 
Dec 05 17:38:46 crc kubenswrapper[4961]: I1205 17:38:46.894145 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-6s9p6"] Dec 05 17:38:46 crc kubenswrapper[4961]: I1205 17:38:46.895727 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6s9p6" Dec 05 17:38:46 crc kubenswrapper[4961]: I1205 17:38:46.898730 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Dec 05 17:38:46 crc kubenswrapper[4961]: I1205 17:38:46.905031 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6s9p6"] Dec 05 17:38:47 crc kubenswrapper[4961]: I1205 17:38:47.067158 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a05d058-cde3-438c-805f-90d265994736-utilities\") pod \"redhat-marketplace-6s9p6\" (UID: \"6a05d058-cde3-438c-805f-90d265994736\") " pod="openshift-marketplace/redhat-marketplace-6s9p6" Dec 05 17:38:47 crc kubenswrapper[4961]: I1205 17:38:47.067232 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9cvz5\" (UniqueName: \"kubernetes.io/projected/6a05d058-cde3-438c-805f-90d265994736-kube-api-access-9cvz5\") pod \"redhat-marketplace-6s9p6\" (UID: \"6a05d058-cde3-438c-805f-90d265994736\") " pod="openshift-marketplace/redhat-marketplace-6s9p6" Dec 05 17:38:47 crc kubenswrapper[4961]: I1205 17:38:47.067264 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a05d058-cde3-438c-805f-90d265994736-catalog-content\") pod \"redhat-marketplace-6s9p6\" (UID: \"6a05d058-cde3-438c-805f-90d265994736\") " pod="openshift-marketplace/redhat-marketplace-6s9p6" Dec 05 17:38:47 crc kubenswrapper[4961]: I1205 17:38:47.084287 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9k6x4"] Dec 05 17:38:47 crc kubenswrapper[4961]: I1205 17:38:47.085229 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9k6x4" Dec 05 17:38:47 crc kubenswrapper[4961]: I1205 17:38:47.087788 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Dec 05 17:38:47 crc kubenswrapper[4961]: I1205 17:38:47.098885 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9k6x4"] Dec 05 17:38:47 crc kubenswrapper[4961]: I1205 17:38:47.168715 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a05d058-cde3-438c-805f-90d265994736-utilities\") pod \"redhat-marketplace-6s9p6\" (UID: \"6a05d058-cde3-438c-805f-90d265994736\") " pod="openshift-marketplace/redhat-marketplace-6s9p6" Dec 05 17:38:47 crc kubenswrapper[4961]: I1205 17:38:47.168803 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9cvz5\" (UniqueName: \"kubernetes.io/projected/6a05d058-cde3-438c-805f-90d265994736-kube-api-access-9cvz5\") pod \"redhat-marketplace-6s9p6\" (UID: \"6a05d058-cde3-438c-805f-90d265994736\") " pod="openshift-marketplace/redhat-marketplace-6s9p6" Dec 05 17:38:47 crc kubenswrapper[4961]: I1205 17:38:47.168848 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a05d058-cde3-438c-805f-90d265994736-catalog-content\") pod \"redhat-marketplace-6s9p6\" (UID: \"6a05d058-cde3-438c-805f-90d265994736\") " pod="openshift-marketplace/redhat-marketplace-6s9p6" Dec 05 17:38:47 crc kubenswrapper[4961]: I1205 17:38:47.169322 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a05d058-cde3-438c-805f-90d265994736-utilities\") pod \"redhat-marketplace-6s9p6\" (UID: \"6a05d058-cde3-438c-805f-90d265994736\") " pod="openshift-marketplace/redhat-marketplace-6s9p6" Dec 05 17:38:47 crc kubenswrapper[4961]: I1205 17:38:47.169364 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a05d058-cde3-438c-805f-90d265994736-catalog-content\") pod \"redhat-marketplace-6s9p6\" (UID: \"6a05d058-cde3-438c-805f-90d265994736\") " pod="openshift-marketplace/redhat-marketplace-6s9p6" Dec 05 17:38:47 crc kubenswrapper[4961]: I1205 17:38:47.186631 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9cvz5\" (UniqueName: \"kubernetes.io/projected/6a05d058-cde3-438c-805f-90d265994736-kube-api-access-9cvz5\") pod \"redhat-marketplace-6s9p6\" (UID: \"6a05d058-cde3-438c-805f-90d265994736\") " pod="openshift-marketplace/redhat-marketplace-6s9p6" Dec 05 17:38:47 crc kubenswrapper[4961]: I1205 17:38:47.220650 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-6s9p6" Dec 05 17:38:47 crc kubenswrapper[4961]: I1205 17:38:47.269793 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e3eba44-cc06-4bc6-83c2-66fcfde32591-utilities\") pod \"redhat-operators-9k6x4\" (UID: \"4e3eba44-cc06-4bc6-83c2-66fcfde32591\") " pod="openshift-marketplace/redhat-operators-9k6x4" Dec 05 17:38:47 crc kubenswrapper[4961]: I1205 17:38:47.269961 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-czlpq\" (UniqueName: \"kubernetes.io/projected/4e3eba44-cc06-4bc6-83c2-66fcfde32591-kube-api-access-czlpq\") pod \"redhat-operators-9k6x4\" (UID: \"4e3eba44-cc06-4bc6-83c2-66fcfde32591\") " pod="openshift-marketplace/redhat-operators-9k6x4" Dec 05 17:38:47 crc kubenswrapper[4961]: I1205 17:38:47.269995 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e3eba44-cc06-4bc6-83c2-66fcfde32591-catalog-content\") pod \"redhat-operators-9k6x4\" (UID: \"4e3eba44-cc06-4bc6-83c2-66fcfde32591\") " pod="openshift-marketplace/redhat-operators-9k6x4" Dec 05 17:38:47 crc kubenswrapper[4961]: I1205 17:38:47.371940 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-czlpq\" (UniqueName: \"kubernetes.io/projected/4e3eba44-cc06-4bc6-83c2-66fcfde32591-kube-api-access-czlpq\") pod \"redhat-operators-9k6x4\" (UID: \"4e3eba44-cc06-4bc6-83c2-66fcfde32591\") " pod="openshift-marketplace/redhat-operators-9k6x4" Dec 05 17:38:47 crc kubenswrapper[4961]: I1205 17:38:47.372477 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e3eba44-cc06-4bc6-83c2-66fcfde32591-catalog-content\") pod \"redhat-operators-9k6x4\" (UID: \"4e3eba44-cc06-4bc6-83c2-66fcfde32591\") " pod="openshift-marketplace/redhat-operators-9k6x4" Dec 05 17:38:47 crc kubenswrapper[4961]: I1205 17:38:47.372512 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e3eba44-cc06-4bc6-83c2-66fcfde32591-utilities\") pod \"redhat-operators-9k6x4\" (UID: \"4e3eba44-cc06-4bc6-83c2-66fcfde32591\") " pod="openshift-marketplace/redhat-operators-9k6x4" Dec 05 17:38:47 crc kubenswrapper[4961]: I1205 17:38:47.373444 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4e3eba44-cc06-4bc6-83c2-66fcfde32591-catalog-content\") pod \"redhat-operators-9k6x4\" (UID: \"4e3eba44-cc06-4bc6-83c2-66fcfde32591\") " pod="openshift-marketplace/redhat-operators-9k6x4" Dec 05 17:38:47 crc kubenswrapper[4961]: I1205 17:38:47.373610 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4e3eba44-cc06-4bc6-83c2-66fcfde32591-utilities\") pod \"redhat-operators-9k6x4\" (UID: \"4e3eba44-cc06-4bc6-83c2-66fcfde32591\") " pod="openshift-marketplace/redhat-operators-9k6x4" Dec 05 17:38:47 crc kubenswrapper[4961]: I1205 17:38:47.407721 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-czlpq\" (UniqueName: \"kubernetes.io/projected/4e3eba44-cc06-4bc6-83c2-66fcfde32591-kube-api-access-czlpq\") pod \"redhat-operators-9k6x4\" (UID: 
\"4e3eba44-cc06-4bc6-83c2-66fcfde32591\") " pod="openshift-marketplace/redhat-operators-9k6x4" Dec 05 17:38:47 crc kubenswrapper[4961]: I1205 17:38:47.667598 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-6s9p6"] Dec 05 17:38:47 crc kubenswrapper[4961]: I1205 17:38:47.701691 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9k6x4" Dec 05 17:38:48 crc kubenswrapper[4961]: I1205 17:38:48.085290 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9k6x4"] Dec 05 17:38:48 crc kubenswrapper[4961]: W1205 17:38:48.098885 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4e3eba44_cc06_4bc6_83c2_66fcfde32591.slice/crio-cc42f6ac9980150e80aa6c537595b53e172a966efc76d8396bdbb95c72bce96f WatchSource:0}: Error finding container cc42f6ac9980150e80aa6c537595b53e172a966efc76d8396bdbb95c72bce96f: Status 404 returned error can't find the container with id cc42f6ac9980150e80aa6c537595b53e172a966efc76d8396bdbb95c72bce96f Dec 05 17:38:48 crc kubenswrapper[4961]: I1205 17:38:48.399761 4961 generic.go:334] "Generic (PLEG): container finished" podID="4e3eba44-cc06-4bc6-83c2-66fcfde32591" containerID="885b17bc887bb4f0c740a09657f9f9124a5eca235dfd5c5a9b72b69c77ca3ee1" exitCode=0 Dec 05 17:38:48 crc kubenswrapper[4961]: I1205 17:38:48.399862 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9k6x4" event={"ID":"4e3eba44-cc06-4bc6-83c2-66fcfde32591","Type":"ContainerDied","Data":"885b17bc887bb4f0c740a09657f9f9124a5eca235dfd5c5a9b72b69c77ca3ee1"} Dec 05 17:38:48 crc kubenswrapper[4961]: I1205 17:38:48.399917 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9k6x4" event={"ID":"4e3eba44-cc06-4bc6-83c2-66fcfde32591","Type":"ContainerStarted","Data":"cc42f6ac9980150e80aa6c537595b53e172a966efc76d8396bdbb95c72bce96f"} Dec 05 17:38:48 crc kubenswrapper[4961]: I1205 17:38:48.404448 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x247h" event={"ID":"9f2bf186-07e1-4212-a88b-377cb9dcc1e4","Type":"ContainerStarted","Data":"e5fa9e8648674063c9d269662e16291d2aa3683815234d8247ab73842193d58e"} Dec 05 17:38:48 crc kubenswrapper[4961]: I1205 17:38:48.407411 4961 generic.go:334] "Generic (PLEG): container finished" podID="6a05d058-cde3-438c-805f-90d265994736" containerID="f2fce8490d8379441cc50589d30e1b5438e130cd16b87dc82903426e417499ab" exitCode=0 Dec 05 17:38:48 crc kubenswrapper[4961]: I1205 17:38:48.407460 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6s9p6" event={"ID":"6a05d058-cde3-438c-805f-90d265994736","Type":"ContainerDied","Data":"f2fce8490d8379441cc50589d30e1b5438e130cd16b87dc82903426e417499ab"} Dec 05 17:38:48 crc kubenswrapper[4961]: I1205 17:38:48.407500 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6s9p6" event={"ID":"6a05d058-cde3-438c-805f-90d265994736","Type":"ContainerStarted","Data":"efb7c97018eceb540912d0b6bb3b95e747920d61e551bd5e93c8f71e27bbef69"} Dec 05 17:38:48 crc kubenswrapper[4961]: I1205 17:38:48.410393 4961 generic.go:334] "Generic (PLEG): container finished" podID="5186f207-07d2-4325-be3d-3e21a3b5de5e" containerID="e01da39d74981bef76d6832036d0eedbbcbbf26ffc6bf9a8d93d1442bd2c84b1" exitCode=0 Dec 05 
17:38:48 crc kubenswrapper[4961]: I1205 17:38:48.410430 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-crqgk" event={"ID":"5186f207-07d2-4325-be3d-3e21a3b5de5e","Type":"ContainerDied","Data":"e01da39d74981bef76d6832036d0eedbbcbbf26ffc6bf9a8d93d1442bd2c84b1"}
Dec 05 17:38:49 crc kubenswrapper[4961]: I1205 17:38:49.416728 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9k6x4" event={"ID":"4e3eba44-cc06-4bc6-83c2-66fcfde32591","Type":"ContainerStarted","Data":"8efcb5683c022901d1d878ab7121b9a4523f34e58a0d5307ec36961e6d4ac879"}
Dec 05 17:38:49 crc kubenswrapper[4961]: I1205 17:38:49.419247 4961 generic.go:334] "Generic (PLEG): container finished" podID="9f2bf186-07e1-4212-a88b-377cb9dcc1e4" containerID="e5fa9e8648674063c9d269662e16291d2aa3683815234d8247ab73842193d58e" exitCode=0
Dec 05 17:38:49 crc kubenswrapper[4961]: I1205 17:38:49.419340 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x247h" event={"ID":"9f2bf186-07e1-4212-a88b-377cb9dcc1e4","Type":"ContainerDied","Data":"e5fa9e8648674063c9d269662e16291d2aa3683815234d8247ab73842193d58e"}
Dec 05 17:38:49 crc kubenswrapper[4961]: I1205 17:38:49.424092 4961 generic.go:334] "Generic (PLEG): container finished" podID="6a05d058-cde3-438c-805f-90d265994736" containerID="d8bf6f9ab6dcde2126fd1bf67cebe6837b612ea77b28d52258881ccc93c0d585" exitCode=0
Dec 05 17:38:49 crc kubenswrapper[4961]: I1205 17:38:49.424169 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6s9p6" event={"ID":"6a05d058-cde3-438c-805f-90d265994736","Type":"ContainerDied","Data":"d8bf6f9ab6dcde2126fd1bf67cebe6837b612ea77b28d52258881ccc93c0d585"}
Dec 05 17:38:49 crc kubenswrapper[4961]: I1205 17:38:49.428540 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-crqgk" event={"ID":"5186f207-07d2-4325-be3d-3e21a3b5de5e","Type":"ContainerStarted","Data":"d5ce64c83d47e4cbe5cdcc6fbfeb0afcdb53f29d6e2c1a0464c3206e87c405c8"}
Dec 05 17:38:49 crc kubenswrapper[4961]: I1205 17:38:49.452592 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-crqgk" podStartSLOduration=2.985723764 podStartE2EDuration="5.452575516s" podCreationTimestamp="2025-12-05 17:38:44 +0000 UTC" firstStartedPulling="2025-12-05 17:38:46.379728902 +0000 UTC m=+332.440879375" lastFinishedPulling="2025-12-05 17:38:48.846580644 +0000 UTC m=+334.907731127" observedRunningTime="2025-12-05 17:38:49.451876408 +0000 UTC m=+335.513026901" watchObservedRunningTime="2025-12-05 17:38:49.452575516 +0000 UTC m=+335.513725979"
Dec 05 17:38:50 crc kubenswrapper[4961]: I1205 17:38:50.435977 4961 generic.go:334] "Generic (PLEG): container finished" podID="4e3eba44-cc06-4bc6-83c2-66fcfde32591" containerID="8efcb5683c022901d1d878ab7121b9a4523f34e58a0d5307ec36961e6d4ac879" exitCode=0
Dec 05 17:38:50 crc kubenswrapper[4961]: I1205 17:38:50.436298 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9k6x4" event={"ID":"4e3eba44-cc06-4bc6-83c2-66fcfde32591","Type":"ContainerDied","Data":"8efcb5683c022901d1d878ab7121b9a4523f34e58a0d5307ec36961e6d4ac879"}
Dec 05 17:38:50 crc kubenswrapper[4961]: I1205 17:38:50.438967 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x247h" event={"ID":"9f2bf186-07e1-4212-a88b-377cb9dcc1e4","Type":"ContainerStarted","Data":"b943a43f7a1aedea089dc44712582d8c486bc4b64888f0af263674755ce411ef"}
Dec 05 17:38:50 crc kubenswrapper[4961]: I1205 17:38:50.441362 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-6s9p6" event={"ID":"6a05d058-cde3-438c-805f-90d265994736","Type":"ContainerStarted","Data":"251022dbfb12a8bd769cecd324f2e8fec14cd14a4815bdf516d8a1f37b9c81c2"}
Dec 05 17:38:50 crc kubenswrapper[4961]: I1205 17:38:50.483025 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-x247h" podStartSLOduration=2.977287478 podStartE2EDuration="6.482824644s" podCreationTimestamp="2025-12-05 17:38:44 +0000 UTC" firstStartedPulling="2025-12-05 17:38:46.375691014 +0000 UTC m=+332.436841487" lastFinishedPulling="2025-12-05 17:38:49.88122819 +0000 UTC m=+335.942378653" observedRunningTime="2025-12-05 17:38:50.480366798 +0000 UTC m=+336.541517281" watchObservedRunningTime="2025-12-05 17:38:50.482824644 +0000 UTC m=+336.543975117"
Dec 05 17:38:50 crc kubenswrapper[4961]: I1205 17:38:50.505810 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-6s9p6" podStartSLOduration=3.110580458 podStartE2EDuration="4.505792278s" podCreationTimestamp="2025-12-05 17:38:46 +0000 UTC" firstStartedPulling="2025-12-05 17:38:48.409351592 +0000 UTC m=+334.470502065" lastFinishedPulling="2025-12-05 17:38:49.804563402 +0000 UTC m=+335.865713885" observedRunningTime="2025-12-05 17:38:50.503405405 +0000 UTC m=+336.564555878" watchObservedRunningTime="2025-12-05 17:38:50.505792278 +0000 UTC m=+336.566942781"
Dec 05 17:38:51 crc kubenswrapper[4961]: I1205 17:38:51.448583 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9k6x4" event={"ID":"4e3eba44-cc06-4bc6-83c2-66fcfde32591","Type":"ContainerStarted","Data":"6306f9fa094b46656f6e05cb01e82927a2e88f913a5dc3acfedbdf2a73a664ff"}
Dec 05 17:38:51 crc kubenswrapper[4961]: I1205 17:38:51.468318 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9k6x4" podStartSLOduration=2.00768664 podStartE2EDuration="4.468302326s" podCreationTimestamp="2025-12-05 17:38:47 +0000 UTC" firstStartedPulling="2025-12-05 17:38:48.401860712 +0000 UTC m=+334.463011195" lastFinishedPulling="2025-12-05 17:38:50.862476398 +0000 UTC m=+336.923626881" observedRunningTime="2025-12-05 17:38:51.467226547 +0000 UTC m=+337.528377030" watchObservedRunningTime="2025-12-05 17:38:51.468302326 +0000 UTC m=+337.529452799"
Dec 05 17:38:54 crc kubenswrapper[4961]: I1205 17:38:54.808055 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-crqgk"
Dec 05 17:38:54 crc kubenswrapper[4961]: I1205 17:38:54.808127 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-crqgk"
Dec 05 17:38:54 crc kubenswrapper[4961]: I1205 17:38:54.854403 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-crqgk"
Dec 05 17:38:55 crc kubenswrapper[4961]: I1205 17:38:55.012750 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-x247h"
Dec 05 17:38:55 crc kubenswrapper[4961]: I1205 17:38:55.012986 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-x247h"
Dec 05 17:38:55 crc kubenswrapper[4961]: I1205 17:38:55.054400 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-x247h"
Dec 05 17:38:55 crc kubenswrapper[4961]: I1205 17:38:55.510989 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-crqgk"
Dec 05 17:38:55 crc kubenswrapper[4961]: I1205 17:38:55.514975 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-x247h"
Dec 05 17:38:57 crc kubenswrapper[4961]: I1205 17:38:57.221727 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-6s9p6"
Dec 05 17:38:57 crc kubenswrapper[4961]: I1205 17:38:57.222510 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-6s9p6"
Dec 05 17:38:57 crc kubenswrapper[4961]: I1205 17:38:57.264192 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-6s9p6"
Dec 05 17:38:57 crc kubenswrapper[4961]: I1205 17:38:57.515893 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-6s9p6"
Dec 05 17:38:57 crc kubenswrapper[4961]: I1205 17:38:57.701460 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9k6x4"
Dec 05 17:38:57 crc kubenswrapper[4961]: I1205 17:38:57.702338 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9k6x4"
Dec 05 17:38:57 crc kubenswrapper[4961]: I1205 17:38:57.744190 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9k6x4"
Dec 05 17:38:58 crc kubenswrapper[4961]: I1205 17:38:58.532548 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9k6x4"
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.209623 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-lgclv"]
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.210550 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.226804 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-lgclv"]
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.320433 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rgpvs\" (UniqueName: \"kubernetes.io/projected/23397d45-fea3-4c8e-a895-2d7327688da6-kube-api-access-rgpvs\") pod \"image-registry-66df7c8f76-lgclv\" (UID: \"23397d45-fea3-4c8e-a895-2d7327688da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.320576 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/23397d45-fea3-4c8e-a895-2d7327688da6-trusted-ca\") pod \"image-registry-66df7c8f76-lgclv\" (UID: \"23397d45-fea3-4c8e-a895-2d7327688da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.320610 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/23397d45-fea3-4c8e-a895-2d7327688da6-registry-tls\") pod \"image-registry-66df7c8f76-lgclv\" (UID: \"23397d45-fea3-4c8e-a895-2d7327688da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.320634 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/23397d45-fea3-4c8e-a895-2d7327688da6-ca-trust-extracted\") pod \"image-registry-66df7c8f76-lgclv\" (UID: \"23397d45-fea3-4c8e-a895-2d7327688da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.320675 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-lgclv\" (UID: \"23397d45-fea3-4c8e-a895-2d7327688da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.320841 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/23397d45-fea3-4c8e-a895-2d7327688da6-installation-pull-secrets\") pod \"image-registry-66df7c8f76-lgclv\" (UID: \"23397d45-fea3-4c8e-a895-2d7327688da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.320907 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/23397d45-fea3-4c8e-a895-2d7327688da6-bound-sa-token\") pod \"image-registry-66df7c8f76-lgclv\" (UID: \"23397d45-fea3-4c8e-a895-2d7327688da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.320954 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/23397d45-fea3-4c8e-a895-2d7327688da6-registry-certificates\") pod \"image-registry-66df7c8f76-lgclv\" (UID: \"23397d45-fea3-4c8e-a895-2d7327688da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.340609 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-lgclv\" (UID: \"23397d45-fea3-4c8e-a895-2d7327688da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.421852 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/23397d45-fea3-4c8e-a895-2d7327688da6-trusted-ca\") pod \"image-registry-66df7c8f76-lgclv\" (UID: \"23397d45-fea3-4c8e-a895-2d7327688da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.421899 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/23397d45-fea3-4c8e-a895-2d7327688da6-registry-tls\") pod \"image-registry-66df7c8f76-lgclv\" (UID: \"23397d45-fea3-4c8e-a895-2d7327688da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.421920 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/23397d45-fea3-4c8e-a895-2d7327688da6-ca-trust-extracted\") pod \"image-registry-66df7c8f76-lgclv\" (UID: \"23397d45-fea3-4c8e-a895-2d7327688da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.421953 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/23397d45-fea3-4c8e-a895-2d7327688da6-registry-certificates\") pod \"image-registry-66df7c8f76-lgclv\" (UID: \"23397d45-fea3-4c8e-a895-2d7327688da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.421975 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/23397d45-fea3-4c8e-a895-2d7327688da6-installation-pull-secrets\") pod \"image-registry-66df7c8f76-lgclv\" (UID: \"23397d45-fea3-4c8e-a895-2d7327688da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.421991 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/23397d45-fea3-4c8e-a895-2d7327688da6-bound-sa-token\") pod \"image-registry-66df7c8f76-lgclv\" (UID: \"23397d45-fea3-4c8e-a895-2d7327688da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.422039 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rgpvs\" (UniqueName: \"kubernetes.io/projected/23397d45-fea3-4c8e-a895-2d7327688da6-kube-api-access-rgpvs\") pod \"image-registry-66df7c8f76-lgclv\" (UID: \"23397d45-fea3-4c8e-a895-2d7327688da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.423057 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/23397d45-fea3-4c8e-a895-2d7327688da6-ca-trust-extracted\") pod \"image-registry-66df7c8f76-lgclv\" (UID: \"23397d45-fea3-4c8e-a895-2d7327688da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.423602 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/23397d45-fea3-4c8e-a895-2d7327688da6-registry-certificates\") pod \"image-registry-66df7c8f76-lgclv\" (UID: \"23397d45-fea3-4c8e-a895-2d7327688da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.423612 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/23397d45-fea3-4c8e-a895-2d7327688da6-trusted-ca\") pod \"image-registry-66df7c8f76-lgclv\" (UID: \"23397d45-fea3-4c8e-a895-2d7327688da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.428530 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/23397d45-fea3-4c8e-a895-2d7327688da6-registry-tls\") pod \"image-registry-66df7c8f76-lgclv\" (UID: \"23397d45-fea3-4c8e-a895-2d7327688da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.429383 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/23397d45-fea3-4c8e-a895-2d7327688da6-installation-pull-secrets\") pod \"image-registry-66df7c8f76-lgclv\" (UID: \"23397d45-fea3-4c8e-a895-2d7327688da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.436468 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/23397d45-fea3-4c8e-a895-2d7327688da6-bound-sa-token\") pod \"image-registry-66df7c8f76-lgclv\" (UID: \"23397d45-fea3-4c8e-a895-2d7327688da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.437070 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rgpvs\" (UniqueName: \"kubernetes.io/projected/23397d45-fea3-4c8e-a895-2d7327688da6-kube-api-access-rgpvs\") pod \"image-registry-66df7c8f76-lgclv\" (UID: \"23397d45-fea3-4c8e-a895-2d7327688da6\") " pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.530848 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:01 crc kubenswrapper[4961]: I1205 17:39:01.987161 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-lgclv"]
Dec 05 17:39:02 crc kubenswrapper[4961]: I1205 17:39:02.508571 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-lgclv" event={"ID":"23397d45-fea3-4c8e-a895-2d7327688da6","Type":"ContainerStarted","Data":"a4ef34f9521bd8f5f7dd757f1005c48cf256b2ed0cad20aca1471ae6516fe884"}
Dec 05 17:39:03 crc kubenswrapper[4961]: I1205 17:39:03.515156 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-lgclv" event={"ID":"23397d45-fea3-4c8e-a895-2d7327688da6","Type":"ContainerStarted","Data":"ac574f59bd37b1a0e95d1a5b62427158d6de8dd9895b8cb399ed723f6eb91b56"}
Dec 05 17:39:03 crc kubenswrapper[4961]: I1205 17:39:03.515532 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:03 crc kubenswrapper[4961]: I1205 17:39:03.534683 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-lgclv" podStartSLOduration=2.5346630919999997 podStartE2EDuration="2.534663092s" podCreationTimestamp="2025-12-05 17:39:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:39:03.533756148 +0000 UTC m=+349.594906651" watchObservedRunningTime="2025-12-05 17:39:03.534663092 +0000 UTC m=+349.595813585"
Dec 05 17:39:21 crc kubenswrapper[4961]: I1205 17:39:21.541201 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-lgclv"
Dec 05 17:39:21 crc kubenswrapper[4961]: I1205 17:39:21.612487 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ksm5f"]
Dec 05 17:39:26 crc kubenswrapper[4961]: I1205 17:39:26.908095 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n"]
Dec 05 17:39:26 crc kubenswrapper[4961]: I1205 17:39:26.908584 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n" podUID="e8391289-6ddb-4b2e-bd78-89e34a0da534" containerName="route-controller-manager" containerID="cri-o://9a0b22b7a813b7e828cfe06c0e2b196fe44422762b49ed96c083cb15bd1a792e" gracePeriod=30
Dec 05 17:39:27 crc kubenswrapper[4961]: I1205 17:39:27.246222 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 17:39:27 crc kubenswrapper[4961]: I1205 17:39:27.246609 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 17:39:27 crc kubenswrapper[4961]: I1205 17:39:27.460075 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n"
Dec 05 17:39:27 crc kubenswrapper[4961]: I1205 17:39:27.512626 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8391289-6ddb-4b2e-bd78-89e34a0da534-config\") pod \"e8391289-6ddb-4b2e-bd78-89e34a0da534\" (UID: \"e8391289-6ddb-4b2e-bd78-89e34a0da534\") "
Dec 05 17:39:27 crc kubenswrapper[4961]: I1205 17:39:27.512706 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r6z7x\" (UniqueName: \"kubernetes.io/projected/e8391289-6ddb-4b2e-bd78-89e34a0da534-kube-api-access-r6z7x\") pod \"e8391289-6ddb-4b2e-bd78-89e34a0da534\" (UID: \"e8391289-6ddb-4b2e-bd78-89e34a0da534\") "
Dec 05 17:39:27 crc kubenswrapper[4961]: I1205 17:39:27.512745 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e8391289-6ddb-4b2e-bd78-89e34a0da534-client-ca\") pod \"e8391289-6ddb-4b2e-bd78-89e34a0da534\" (UID: \"e8391289-6ddb-4b2e-bd78-89e34a0da534\") "
Dec 05 17:39:27 crc kubenswrapper[4961]: I1205 17:39:27.512788 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e8391289-6ddb-4b2e-bd78-89e34a0da534-serving-cert\") pod \"e8391289-6ddb-4b2e-bd78-89e34a0da534\" (UID: \"e8391289-6ddb-4b2e-bd78-89e34a0da534\") "
Dec 05 17:39:27 crc kubenswrapper[4961]: I1205 17:39:27.513599 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8391289-6ddb-4b2e-bd78-89e34a0da534-client-ca" (OuterVolumeSpecName: "client-ca") pod "e8391289-6ddb-4b2e-bd78-89e34a0da534" (UID: "e8391289-6ddb-4b2e-bd78-89e34a0da534"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:39:27 crc kubenswrapper[4961]: I1205 17:39:27.513795 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e8391289-6ddb-4b2e-bd78-89e34a0da534-config" (OuterVolumeSpecName: "config") pod "e8391289-6ddb-4b2e-bd78-89e34a0da534" (UID: "e8391289-6ddb-4b2e-bd78-89e34a0da534"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:39:27 crc kubenswrapper[4961]: I1205 17:39:27.518296 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e8391289-6ddb-4b2e-bd78-89e34a0da534-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e8391289-6ddb-4b2e-bd78-89e34a0da534" (UID: "e8391289-6ddb-4b2e-bd78-89e34a0da534"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:39:27 crc kubenswrapper[4961]: I1205 17:39:27.522008 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e8391289-6ddb-4b2e-bd78-89e34a0da534-kube-api-access-r6z7x" (OuterVolumeSpecName: "kube-api-access-r6z7x") pod "e8391289-6ddb-4b2e-bd78-89e34a0da534" (UID: "e8391289-6ddb-4b2e-bd78-89e34a0da534"). InnerVolumeSpecName "kube-api-access-r6z7x". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:39:27 crc kubenswrapper[4961]: I1205 17:39:27.614088 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r6z7x\" (UniqueName: \"kubernetes.io/projected/e8391289-6ddb-4b2e-bd78-89e34a0da534-kube-api-access-r6z7x\") on node \"crc\" DevicePath \"\""
Dec 05 17:39:27 crc kubenswrapper[4961]: I1205 17:39:27.614121 4961 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e8391289-6ddb-4b2e-bd78-89e34a0da534-client-ca\") on node \"crc\" DevicePath \"\""
Dec 05 17:39:27 crc kubenswrapper[4961]: I1205 17:39:27.614132 4961 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e8391289-6ddb-4b2e-bd78-89e34a0da534-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 17:39:27 crc kubenswrapper[4961]: I1205 17:39:27.614140 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e8391289-6ddb-4b2e-bd78-89e34a0da534-config\") on node \"crc\" DevicePath \"\""
Dec 05 17:39:27 crc kubenswrapper[4961]: I1205 17:39:27.667468 4961 generic.go:334] "Generic (PLEG): container finished" podID="e8391289-6ddb-4b2e-bd78-89e34a0da534" containerID="9a0b22b7a813b7e828cfe06c0e2b196fe44422762b49ed96c083cb15bd1a792e" exitCode=0
Dec 05 17:39:27 crc kubenswrapper[4961]: I1205 17:39:27.667528 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n" event={"ID":"e8391289-6ddb-4b2e-bd78-89e34a0da534","Type":"ContainerDied","Data":"9a0b22b7a813b7e828cfe06c0e2b196fe44422762b49ed96c083cb15bd1a792e"}
Dec 05 17:39:27 crc kubenswrapper[4961]: I1205 17:39:27.667554 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n"
Dec 05 17:39:27 crc kubenswrapper[4961]: I1205 17:39:27.667577 4961 scope.go:117] "RemoveContainer" containerID="9a0b22b7a813b7e828cfe06c0e2b196fe44422762b49ed96c083cb15bd1a792e"
Dec 05 17:39:27 crc kubenswrapper[4961]: I1205 17:39:27.667564 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n" event={"ID":"e8391289-6ddb-4b2e-bd78-89e34a0da534","Type":"ContainerDied","Data":"4185af246d6c3f812e68b39b0b3f0f4143425a37eb9ce72c1dfd05bbd1f494cb"}
Dec 05 17:39:27 crc kubenswrapper[4961]: I1205 17:39:27.690227 4961 scope.go:117] "RemoveContainer" containerID="9a0b22b7a813b7e828cfe06c0e2b196fe44422762b49ed96c083cb15bd1a792e"
Dec 05 17:39:27 crc kubenswrapper[4961]: E1205 17:39:27.693665 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a0b22b7a813b7e828cfe06c0e2b196fe44422762b49ed96c083cb15bd1a792e\": container with ID starting with 9a0b22b7a813b7e828cfe06c0e2b196fe44422762b49ed96c083cb15bd1a792e not found: ID does not exist" containerID="9a0b22b7a813b7e828cfe06c0e2b196fe44422762b49ed96c083cb15bd1a792e"
Dec 05 17:39:27 crc kubenswrapper[4961]: I1205 17:39:27.693839 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a0b22b7a813b7e828cfe06c0e2b196fe44422762b49ed96c083cb15bd1a792e"} err="failed to get container status \"9a0b22b7a813b7e828cfe06c0e2b196fe44422762b49ed96c083cb15bd1a792e\": rpc error: code = NotFound desc = could not find container \"9a0b22b7a813b7e828cfe06c0e2b196fe44422762b49ed96c083cb15bd1a792e\": container with ID starting with 9a0b22b7a813b7e828cfe06c0e2b196fe44422762b49ed96c083cb15bd1a792e not found: ID does not exist"
Dec 05 17:39:27 crc kubenswrapper[4961]: I1205 17:39:27.699817 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n"]
Dec 05 17:39:27 crc kubenswrapper[4961]: I1205 17:39:27.703719 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-745f64c84c-qvc5n"]
Dec 05 17:39:28 crc kubenswrapper[4961]: I1205 17:39:28.039510 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-769764c597-8chts"]
Dec 05 17:39:28 crc kubenswrapper[4961]: E1205 17:39:28.039722 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e8391289-6ddb-4b2e-bd78-89e34a0da534" containerName="route-controller-manager"
Dec 05 17:39:28 crc kubenswrapper[4961]: I1205 17:39:28.039734 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="e8391289-6ddb-4b2e-bd78-89e34a0da534" containerName="route-controller-manager"
Dec 05 17:39:28 crc kubenswrapper[4961]: I1205 17:39:28.039839 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="e8391289-6ddb-4b2e-bd78-89e34a0da534" containerName="route-controller-manager"
Dec 05 17:39:28 crc kubenswrapper[4961]: I1205 17:39:28.040193 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-769764c597-8chts"
Dec 05 17:39:28 crc kubenswrapper[4961]: I1205 17:39:28.042285 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Dec 05 17:39:28 crc kubenswrapper[4961]: I1205 17:39:28.042461 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Dec 05 17:39:28 crc kubenswrapper[4961]: I1205 17:39:28.042619 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Dec 05 17:39:28 crc kubenswrapper[4961]: I1205 17:39:28.042730 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Dec 05 17:39:28 crc kubenswrapper[4961]: I1205 17:39:28.042849 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Dec 05 17:39:28 crc kubenswrapper[4961]: I1205 17:39:28.043051 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Dec 05 17:39:28 crc kubenswrapper[4961]: I1205 17:39:28.054572 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-769764c597-8chts"]
Dec 05 17:39:28 crc kubenswrapper[4961]: I1205 17:39:28.119694 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4131de0d-ce98-41db-ba2a-7a1ff31cc495-config\") pod \"route-controller-manager-769764c597-8chts\" (UID: \"4131de0d-ce98-41db-ba2a-7a1ff31cc495\") " pod="openshift-route-controller-manager/route-controller-manager-769764c597-8chts"
Dec 05 17:39:28 crc kubenswrapper[4961]: I1205 17:39:28.119742 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4131de0d-ce98-41db-ba2a-7a1ff31cc495-client-ca\") pod \"route-controller-manager-769764c597-8chts\" (UID: \"4131de0d-ce98-41db-ba2a-7a1ff31cc495\") " pod="openshift-route-controller-manager/route-controller-manager-769764c597-8chts"
Dec 05 17:39:28 crc kubenswrapper[4961]: I1205 17:39:28.119791 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m9lxk\" (UniqueName: \"kubernetes.io/projected/4131de0d-ce98-41db-ba2a-7a1ff31cc495-kube-api-access-m9lxk\") pod \"route-controller-manager-769764c597-8chts\" (UID: \"4131de0d-ce98-41db-ba2a-7a1ff31cc495\") " pod="openshift-route-controller-manager/route-controller-manager-769764c597-8chts"
Dec 05 17:39:28 crc kubenswrapper[4961]: I1205 17:39:28.119987 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4131de0d-ce98-41db-ba2a-7a1ff31cc495-serving-cert\") pod \"route-controller-manager-769764c597-8chts\" (UID: \"4131de0d-ce98-41db-ba2a-7a1ff31cc495\") " pod="openshift-route-controller-manager/route-controller-manager-769764c597-8chts"
Dec 05 17:39:28 crc kubenswrapper[4961]: I1205 17:39:28.221002 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4131de0d-ce98-41db-ba2a-7a1ff31cc495-config\") pod \"route-controller-manager-769764c597-8chts\" (UID: \"4131de0d-ce98-41db-ba2a-7a1ff31cc495\") " pod="openshift-route-controller-manager/route-controller-manager-769764c597-8chts"
Dec 05 17:39:28 crc kubenswrapper[4961]: I1205 17:39:28.221055 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4131de0d-ce98-41db-ba2a-7a1ff31cc495-client-ca\") pod \"route-controller-manager-769764c597-8chts\" (UID: \"4131de0d-ce98-41db-ba2a-7a1ff31cc495\") " pod="openshift-route-controller-manager/route-controller-manager-769764c597-8chts"
Dec 05 17:39:28 crc kubenswrapper[4961]: I1205 17:39:28.221083 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m9lxk\" (UniqueName: \"kubernetes.io/projected/4131de0d-ce98-41db-ba2a-7a1ff31cc495-kube-api-access-m9lxk\") pod \"route-controller-manager-769764c597-8chts\" (UID: \"4131de0d-ce98-41db-ba2a-7a1ff31cc495\") " pod="openshift-route-controller-manager/route-controller-manager-769764c597-8chts"
Dec 05 17:39:28 crc kubenswrapper[4961]: I1205 17:39:28.221128 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4131de0d-ce98-41db-ba2a-7a1ff31cc495-serving-cert\") pod \"route-controller-manager-769764c597-8chts\" (UID: \"4131de0d-ce98-41db-ba2a-7a1ff31cc495\") " pod="openshift-route-controller-manager/route-controller-manager-769764c597-8chts"
Dec 05 17:39:28 crc kubenswrapper[4961]: I1205 17:39:28.223014 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/4131de0d-ce98-41db-ba2a-7a1ff31cc495-client-ca\") pod \"route-controller-manager-769764c597-8chts\" (UID: \"4131de0d-ce98-41db-ba2a-7a1ff31cc495\") " pod="openshift-route-controller-manager/route-controller-manager-769764c597-8chts"
Dec 05 17:39:28 crc kubenswrapper[4961]: I1205 17:39:28.224402 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4131de0d-ce98-41db-ba2a-7a1ff31cc495-config\") pod \"route-controller-manager-769764c597-8chts\" (UID: \"4131de0d-ce98-41db-ba2a-7a1ff31cc495\") " pod="openshift-route-controller-manager/route-controller-manager-769764c597-8chts"
Dec 05 17:39:28 crc kubenswrapper[4961]: I1205 17:39:28.225952 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4131de0d-ce98-41db-ba2a-7a1ff31cc495-serving-cert\") pod \"route-controller-manager-769764c597-8chts\" (UID: \"4131de0d-ce98-41db-ba2a-7a1ff31cc495\") " pod="openshift-route-controller-manager/route-controller-manager-769764c597-8chts"
Dec 05 17:39:28 crc kubenswrapper[4961]: I1205 17:39:28.246832 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m9lxk\" (UniqueName: \"kubernetes.io/projected/4131de0d-ce98-41db-ba2a-7a1ff31cc495-kube-api-access-m9lxk\") pod \"route-controller-manager-769764c597-8chts\" (UID: \"4131de0d-ce98-41db-ba2a-7a1ff31cc495\") " pod="openshift-route-controller-manager/route-controller-manager-769764c597-8chts"
Dec 05 17:39:28 crc kubenswrapper[4961]: I1205 17:39:28.365338 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-769764c597-8chts"
Dec 05 17:39:28 crc kubenswrapper[4961]: W1205 17:39:28.781482 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4131de0d_ce98_41db_ba2a_7a1ff31cc495.slice/crio-864f25165768b46a9d3795f3a5d5829c37f58d29e8403dffbeeaccce4d0675d4 WatchSource:0}: Error finding container 864f25165768b46a9d3795f3a5d5829c37f58d29e8403dffbeeaccce4d0675d4: Status 404 returned error can't find the container with id 864f25165768b46a9d3795f3a5d5829c37f58d29e8403dffbeeaccce4d0675d4
Dec 05 17:39:28 crc kubenswrapper[4961]: I1205 17:39:28.785576 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-769764c597-8chts"]
Dec 05 17:39:28 crc kubenswrapper[4961]: I1205 17:39:28.873034 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e8391289-6ddb-4b2e-bd78-89e34a0da534" path="/var/lib/kubelet/pods/e8391289-6ddb-4b2e-bd78-89e34a0da534/volumes"
Dec 05 17:39:29 crc kubenswrapper[4961]: I1205 17:39:29.685337 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-769764c597-8chts" event={"ID":"4131de0d-ce98-41db-ba2a-7a1ff31cc495","Type":"ContainerStarted","Data":"56622dd88f010f3339d555d771b203864724ad2ef28af9d81e9f09e044b497e7"}
Dec 05 17:39:29 crc kubenswrapper[4961]: I1205 17:39:29.686574 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-769764c597-8chts" event={"ID":"4131de0d-ce98-41db-ba2a-7a1ff31cc495","Type":"ContainerStarted","Data":"864f25165768b46a9d3795f3a5d5829c37f58d29e8403dffbeeaccce4d0675d4"}
Dec 05 17:39:29 crc kubenswrapper[4961]: I1205 17:39:29.686592 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-769764c597-8chts"
Dec 05 17:39:29 crc kubenswrapper[4961]: I1205 17:39:29.690964 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-769764c597-8chts"
Dec 05 17:39:29 crc kubenswrapper[4961]: I1205 17:39:29.704565 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-769764c597-8chts" podStartSLOduration=3.7045495920000002 podStartE2EDuration="3.704549592s" podCreationTimestamp="2025-12-05 17:39:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:39:29.704055918 +0000 UTC m=+375.765206401" watchObservedRunningTime="2025-12-05 17:39:29.704549592 +0000 UTC m=+375.765700065"
Dec 05 17:39:46 crc kubenswrapper[4961]: I1205 17:39:46.654927 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" podUID="31cbe53c-5dbf-4d21-8daa-da44aa13b7dc" containerName="registry" containerID="cri-o://7bf1b4245e856ca773cb452d087a82625e7fadcf9dfc84a5c0314d742fc3feea" gracePeriod=30
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.018737 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.083968 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tbv5x\" (UniqueName: \"kubernetes.io/projected/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-kube-api-access-tbv5x\") pod \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") "
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.084301 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-registry-certificates\") pod \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") "
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.084324 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-trusted-ca\") pod \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") "
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.084355 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-ca-trust-extracted\") pod \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") "
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.084385 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-bound-sa-token\") pod \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") "
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.084409 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-registry-tls\") pod \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") "
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.084591 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") "
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.084622 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-installation-pull-secrets\") pod \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\" (UID: \"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc\") "
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.085108 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.085249 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.089650 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.089962 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.095278 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.099973 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.100781 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.100928 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-kube-api-access-tbv5x" (OuterVolumeSpecName: "kube-api-access-tbv5x") pod "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc" (UID: "31cbe53c-5dbf-4d21-8daa-da44aa13b7dc"). InnerVolumeSpecName "kube-api-access-tbv5x". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.186070 4961 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-bound-sa-token\") on node \"crc\" DevicePath \"\""
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.186109 4961 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-registry-tls\") on node \"crc\" DevicePath \"\""
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.186120 4961 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-installation-pull-secrets\") on node \"crc\" DevicePath \"\""
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.186132 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tbv5x\" (UniqueName: \"kubernetes.io/projected/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-kube-api-access-tbv5x\") on node \"crc\" DevicePath \"\""
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.186141 4961 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-registry-certificates\") on node \"crc\" DevicePath \"\""
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.186148 4961 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-trusted-ca\") on node \"crc\" DevicePath \"\""
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.186156 4961 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc-ca-trust-extracted\") on node \"crc\" DevicePath \"\""
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.787302 4961 generic.go:334] "Generic (PLEG): container finished" podID="31cbe53c-5dbf-4d21-8daa-da44aa13b7dc" containerID="7bf1b4245e856ca773cb452d087a82625e7fadcf9dfc84a5c0314d742fc3feea" exitCode=0
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.787342 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" event={"ID":"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc","Type":"ContainerDied","Data":"7bf1b4245e856ca773cb452d087a82625e7fadcf9dfc84a5c0314d742fc3feea"}
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.787365 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f" event={"ID":"31cbe53c-5dbf-4d21-8daa-da44aa13b7dc","Type":"ContainerDied","Data":"cb4ae14b4140cbf7ab68e0484bb011d1812010fd79391222cc6f9e6a51960220"}
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.787381 4961 scope.go:117] "RemoveContainer" containerID="7bf1b4245e856ca773cb452d087a82625e7fadcf9dfc84a5c0314d742fc3feea"
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.787381 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-ksm5f"
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.805097 4961 scope.go:117] "RemoveContainer" containerID="7bf1b4245e856ca773cb452d087a82625e7fadcf9dfc84a5c0314d742fc3feea"
Dec 05 17:39:47 crc kubenswrapper[4961]: E1205 17:39:47.810495 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7bf1b4245e856ca773cb452d087a82625e7fadcf9dfc84a5c0314d742fc3feea\": container with ID starting with 7bf1b4245e856ca773cb452d087a82625e7fadcf9dfc84a5c0314d742fc3feea not found: ID does not exist" containerID="7bf1b4245e856ca773cb452d087a82625e7fadcf9dfc84a5c0314d742fc3feea"
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.810544 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7bf1b4245e856ca773cb452d087a82625e7fadcf9dfc84a5c0314d742fc3feea"} err="failed to get container status \"7bf1b4245e856ca773cb452d087a82625e7fadcf9dfc84a5c0314d742fc3feea\": rpc error: code = NotFound desc = could not find container \"7bf1b4245e856ca773cb452d087a82625e7fadcf9dfc84a5c0314d742fc3feea\": container with ID starting with 7bf1b4245e856ca773cb452d087a82625e7fadcf9dfc84a5c0314d742fc3feea not found: ID does not exist"
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.818302 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ksm5f"]
Dec 05 17:39:47 crc kubenswrapper[4961]: I1205 17:39:47.824112 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-ksm5f"]
Dec 05 17:39:48 crc kubenswrapper[4961]: I1205 17:39:48.869333 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31cbe53c-5dbf-4d21-8daa-da44aa13b7dc" path="/var/lib/kubelet/pods/31cbe53c-5dbf-4d21-8daa-da44aa13b7dc/volumes"
Dec 05 17:39:57 crc kubenswrapper[4961]: I1205 17:39:57.245296 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 17:39:57 crc kubenswrapper[4961]: I1205 17:39:57.246920 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 17:40:27 crc kubenswrapper[4961]: I1205 17:40:27.246108 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 17:40:27 crc kubenswrapper[4961]: I1205 17:40:27.248728 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 17:40:27 crc kubenswrapper[4961]: I1205 17:40:27.249041 4961 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vc27"
Dec 05 17:40:27 crc kubenswrapper[4961]: I1205 17:40:27.250246 4961 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ca99d8e70567623b8c5ad00a1a0c15543d5fe98ccba8ac7da75e4ab98c660364"} pod="openshift-machine-config-operator/machine-config-daemon-4vc27" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 17:40:27 crc kubenswrapper[4961]: I1205 17:40:27.251233 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" containerID="cri-o://ca99d8e70567623b8c5ad00a1a0c15543d5fe98ccba8ac7da75e4ab98c660364" gracePeriod=600
Dec 05 17:40:28 crc kubenswrapper[4961]: I1205 17:40:28.008752 4961 generic.go:334] "Generic (PLEG): container finished" podID="c048c267-061b-479b-9d63-b3aee093d9f6" containerID="ca99d8e70567623b8c5ad00a1a0c15543d5fe98ccba8ac7da75e4ab98c660364" exitCode=0
Dec 05 17:40:28 crc kubenswrapper[4961]: I1205 17:40:28.008841 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerDied","Data":"ca99d8e70567623b8c5ad00a1a0c15543d5fe98ccba8ac7da75e4ab98c660364"}
Dec 05 17:40:28 crc kubenswrapper[4961]: I1205 17:40:28.009220 4961 scope.go:117] "RemoveContainer" containerID="384dbb48a7f0f31e4e86badeb6b8f7521f40e3d34333623402e2ec07b4476bd8"
Dec 05 17:40:29 crc kubenswrapper[4961]: I1205 17:40:29.016326 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerStarted","Data":"186f0ffa4d8d4244121fc3f97d5899df054ba290ecdaef527b5b09c64012516b"}
Dec 05 17:42:15 crc kubenswrapper[4961]: I1205 17:42:15.030618 4961 scope.go:117] "RemoveContainer" containerID="bb6c570c3a003db52c6bbe61fd99fae6f7705cd2cb58b385a5e2c19599728a56"
Dec 05 17:42:15 crc kubenswrapper[4961]: I1205 17:42:15.064105 4961 scope.go:117] "RemoveContainer" containerID="731d3a956876b7765d75b7f9328e4833245a4f186e07e83856ba148c6148369e"
Dec 05 17:42:57 crc kubenswrapper[4961]: I1205 17:42:57.246355 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 17:42:57 crc kubenswrapper[4961]: I1205 17:42:57.247156 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 17:43:15 crc kubenswrapper[4961]: I1205 17:43:15.096904 4961 scope.go:117] "RemoveContainer" containerID="242f80cb3e436632876cfe15cbd30d15f87a162d43d64c8f8aad8a68db8401f8"
Dec 05 17:43:15 crc kubenswrapper[4961]: I1205 17:43:15.124452 4961 scope.go:117] "RemoveContainer" containerID="00cfd59f70f98ac728eeb6e4c2684cccfa400aa4756d1c7297314af4ed3e4328"
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.099727 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-jxh56"]
Dec 05 17:43:24 crc kubenswrapper[4961]: E1205 17:43:24.100256 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="31cbe53c-5dbf-4d21-8daa-da44aa13b7dc" containerName="registry"
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.100268 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="31cbe53c-5dbf-4d21-8daa-da44aa13b7dc" containerName="registry"
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.100366 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="31cbe53c-5dbf-4d21-8daa-da44aa13b7dc" containerName="registry"
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.100727 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-jxh56"
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.103743 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"openshift-service-ca.crt"
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.103815 4961 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-cainjector-dockercfg-l5c22"
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.103856 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"cert-manager"/"kube-root-ca.crt"
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.109613 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-5b446d88c5-w62zs"]
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.110374 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-w62zs"
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.111903 4961 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-dockercfg-2fzdr"
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.114487 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-jxh56"]
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.118594 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-w62zs"]
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.132807 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-xzk6r"]
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.133614 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-xzk6r"
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.137730 4961 reflector.go:368] Caches populated for *v1.Secret from object-"cert-manager"/"cert-manager-webhook-dockercfg-twn87"
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.150829 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-xzk6r"]
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.286416 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4wfdp\" (UniqueName: \"kubernetes.io/projected/6a524c2b-8219-4740-b09e-0b855aa04c35-kube-api-access-4wfdp\") pod \"cert-manager-webhook-5655c58dd6-xzk6r\" (UID: \"6a524c2b-8219-4740-b09e-0b855aa04c35\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-xzk6r"
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.286489 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdjg9\" (UniqueName: \"kubernetes.io/projected/69ce6fc6-0715-4f9c-9c01-3db5dfcbf386-kube-api-access-tdjg9\") pod \"cert-manager-5b446d88c5-w62zs\" (UID: \"69ce6fc6-0715-4f9c-9c01-3db5dfcbf386\") " pod="cert-manager/cert-manager-5b446d88c5-w62zs"
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.286527 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mdddk\" (UniqueName: \"kubernetes.io/projected/3186034b-e42e-4c32-a5ad-942d6bbb0659-kube-api-access-mdddk\") pod \"cert-manager-cainjector-7f985d654d-jxh56\" (UID: \"3186034b-e42e-4c32-a5ad-942d6bbb0659\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-jxh56"
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.387700 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mdddk\" (UniqueName: \"kubernetes.io/projected/3186034b-e42e-4c32-a5ad-942d6bbb0659-kube-api-access-mdddk\") pod \"cert-manager-cainjector-7f985d654d-jxh56\" (UID: \"3186034b-e42e-4c32-a5ad-942d6bbb0659\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-jxh56"
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.387787 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4wfdp\" (UniqueName: \"kubernetes.io/projected/6a524c2b-8219-4740-b09e-0b855aa04c35-kube-api-access-4wfdp\") pod \"cert-manager-webhook-5655c58dd6-xzk6r\" (UID: \"6a524c2b-8219-4740-b09e-0b855aa04c35\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-xzk6r"
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.387848 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdjg9\" (UniqueName: \"kubernetes.io/projected/69ce6fc6-0715-4f9c-9c01-3db5dfcbf386-kube-api-access-tdjg9\") pod \"cert-manager-5b446d88c5-w62zs\" (UID: \"69ce6fc6-0715-4f9c-9c01-3db5dfcbf386\") " pod="cert-manager/cert-manager-5b446d88c5-w62zs"
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.406691 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tdjg9\" (UniqueName: \"kubernetes.io/projected/69ce6fc6-0715-4f9c-9c01-3db5dfcbf386-kube-api-access-tdjg9\") pod \"cert-manager-5b446d88c5-w62zs\" (UID: \"69ce6fc6-0715-4f9c-9c01-3db5dfcbf386\") " pod="cert-manager/cert-manager-5b446d88c5-w62zs"
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.406701 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mdddk\" (UniqueName: \"kubernetes.io/projected/3186034b-e42e-4c32-a5ad-942d6bbb0659-kube-api-access-mdddk\") pod \"cert-manager-cainjector-7f985d654d-jxh56\" (UID: \"3186034b-e42e-4c32-a5ad-942d6bbb0659\") " pod="cert-manager/cert-manager-cainjector-7f985d654d-jxh56"
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.407243 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4wfdp\" (UniqueName: \"kubernetes.io/projected/6a524c2b-8219-4740-b09e-0b855aa04c35-kube-api-access-4wfdp\") pod \"cert-manager-webhook-5655c58dd6-xzk6r\" (UID: \"6a524c2b-8219-4740-b09e-0b855aa04c35\") " pod="cert-manager/cert-manager-webhook-5655c58dd6-xzk6r"
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.423883 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-cainjector-7f985d654d-jxh56"
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.434235 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-5b446d88c5-w62zs"
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.453489 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="cert-manager/cert-manager-webhook-5655c58dd6-xzk6r"
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.634037 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-5b446d88c5-w62zs"]
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.646569 4961 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 17:43:24 crc kubenswrapper[4961]: W1205 17:43:24.899497 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6a524c2b_8219_4740_b09e_0b855aa04c35.slice/crio-86edfd7653dfe7023901051f5f23a5aec96f904dbff25c21da24afbbbfea0145 WatchSource:0}: Error finding container 86edfd7653dfe7023901051f5f23a5aec96f904dbff25c21da24afbbbfea0145: Status 404 returned error can't find the container with id 86edfd7653dfe7023901051f5f23a5aec96f904dbff25c21da24afbbbfea0145
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.903868 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-webhook-5655c58dd6-xzk6r"]
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.910357 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["cert-manager/cert-manager-cainjector-7f985d654d-jxh56"]
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.996226 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-w62zs" event={"ID":"69ce6fc6-0715-4f9c-9c01-3db5dfcbf386","Type":"ContainerStarted","Data":"916b3205b7f6691d95b428ec85cb8e24a8f5792d8eda36e1d798fdc72cc6ea0d"}
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.998003 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-jxh56" event={"ID":"3186034b-e42e-4c32-a5ad-942d6bbb0659","Type":"ContainerStarted","Data":"c34aa68dfe744b8402f656a20822b2616608db563f50ab25f75a8244e83b5b2e"}
Dec 05 17:43:24 crc kubenswrapper[4961]: I1205 17:43:24.998901 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-xzk6r" event={"ID":"6a524c2b-8219-4740-b09e-0b855aa04c35","Type":"ContainerStarted","Data":"86edfd7653dfe7023901051f5f23a5aec96f904dbff25c21da24afbbbfea0145"}
Dec 05 17:43:27 crc kubenswrapper[4961]: I1205 17:43:27.245878 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 17:43:27 crc kubenswrapper[4961]: I1205 17:43:27.246225 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 17:43:30 crc kubenswrapper[4961]: I1205 17:43:30.033712 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-5b446d88c5-w62zs" event={"ID":"69ce6fc6-0715-4f9c-9c01-3db5dfcbf386","Type":"ContainerStarted","Data":"2d114f72beec68f7f962423ad6f70657b85dc3b5ec6521ab345cf688f314bbff"}
Dec 05 17:43:30 crc kubenswrapper[4961]: I1205 17:43:30.036376 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-cainjector-7f985d654d-jxh56" event={"ID":"3186034b-e42e-4c32-a5ad-942d6bbb0659","Type":"ContainerStarted","Data":"e30fa3f0919f12adda488fd3775de7f49c0c8ecc3afc6110093e9e89e2fce74f"}
Dec 05 17:43:30 crc kubenswrapper[4961]: I1205 17:43:30.039206 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="cert-manager/cert-manager-webhook-5655c58dd6-xzk6r" event={"ID":"6a524c2b-8219-4740-b09e-0b855aa04c35","Type":"ContainerStarted","Data":"4a0489216f146324486558da11a9d657508eaf40670a82757ce89c263f1cc005"}
Dec 05 17:43:30 crc kubenswrapper[4961]: I1205 17:43:30.039417 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="cert-manager/cert-manager-webhook-5655c58dd6-xzk6r"
Dec 05 17:43:30 crc kubenswrapper[4961]: I1205 17:43:30.064447 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-5b446d88c5-w62zs" podStartSLOduration=2.030199905 podStartE2EDuration="6.064422717s" podCreationTimestamp="2025-12-05 17:43:24 +0000 UTC" firstStartedPulling="2025-12-05 17:43:24.646248959 +0000 UTC m=+610.707399422" lastFinishedPulling="2025-12-05 17:43:28.680471761 +0000 UTC m=+614.741622234" observedRunningTime="2025-12-05 17:43:30.062749676 +0000 UTC m=+616.123900179" watchObservedRunningTime="2025-12-05 17:43:30.064422717 +0000 UTC m=+616.125573220"
Dec 05 17:43:30 crc kubenswrapper[4961]: I1205 17:43:30.088302 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-cainjector-7f985d654d-jxh56" podStartSLOduration=2.159852724 podStartE2EDuration="6.088279632s" podCreationTimestamp="2025-12-05 17:43:24 +0000 UTC" firstStartedPulling="2025-12-05 17:43:24.903044755 +0000 UTC m=+610.964195228" lastFinishedPulling="2025-12-05 17:43:28.831471663 +0000 UTC m=+614.892622136" observedRunningTime="2025-12-05 17:43:30.084818377 +0000 UTC m=+616.145968900" watchObservedRunningTime="2025-12-05 17:43:30.088279632 +0000 UTC m=+616.149430115"
Dec 05 17:43:30 crc kubenswrapper[4961]: I1205 17:43:30.112284 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="cert-manager/cert-manager-webhook-5655c58dd6-xzk6r" podStartSLOduration=2.335647863 podStartE2EDuration="6.11225958s" podCreationTimestamp="2025-12-05 17:43:24 +0000 UTC" firstStartedPulling="2025-12-05 17:43:24.903373053 +0000 UTC m=+610.964523526" lastFinishedPulling="2025-12-05 
17:43:28.67998476 +0000 UTC m=+614.741135243" observedRunningTime="2025-12-05 17:43:30.108895488 +0000 UTC m=+616.170046011" watchObservedRunningTime="2025-12-05 17:43:30.11225958 +0000 UTC m=+616.173410073" Dec 05 17:43:33 crc kubenswrapper[4961]: I1205 17:43:33.933047 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5w9vd"] Dec 05 17:43:33 crc kubenswrapper[4961]: I1205 17:43:33.935388 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="ovn-controller" containerID="cri-o://32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8" gracePeriod=30 Dec 05 17:43:33 crc kubenswrapper[4961]: I1205 17:43:33.935499 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd" gracePeriod=30 Dec 05 17:43:33 crc kubenswrapper[4961]: I1205 17:43:33.935469 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="sbdb" containerID="cri-o://072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e" gracePeriod=30 Dec 05 17:43:33 crc kubenswrapper[4961]: I1205 17:43:33.935526 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="northd" containerID="cri-o://233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26" gracePeriod=30 Dec 05 17:43:33 crc kubenswrapper[4961]: I1205 17:43:33.935557 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="ovn-acl-logging" containerID="cri-o://9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415" gracePeriod=30 Dec 05 17:43:33 crc kubenswrapper[4961]: I1205 17:43:33.935569 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="kube-rbac-proxy-node" containerID="cri-o://4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93" gracePeriod=30 Dec 05 17:43:33 crc kubenswrapper[4961]: I1205 17:43:33.935416 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="nbdb" containerID="cri-o://2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765" gracePeriod=30 Dec 05 17:43:33 crc kubenswrapper[4961]: I1205 17:43:33.963171 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="ovnkube-controller" containerID="cri-o://11daeee141f685444018fda0cd1d816c8fd3cb2424c7e936f290b30fcd16f3d9" gracePeriod=30 Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.064887 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bgtgs_26618630-1782-4ae8-af12-6f913fbddf5b/kube-multus/2.log" Dec 05 17:43:34 crc 
kubenswrapper[4961]: I1205 17:43:34.065255 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bgtgs_26618630-1782-4ae8-af12-6f913fbddf5b/kube-multus/1.log" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.065295 4961 generic.go:334] "Generic (PLEG): container finished" podID="26618630-1782-4ae8-af12-6f913fbddf5b" containerID="56a0e4e4ff839ae5900c84d69890f793ddb3bd84538df59669303c0bf98050f3" exitCode=2 Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.065342 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bgtgs" event={"ID":"26618630-1782-4ae8-af12-6f913fbddf5b","Type":"ContainerDied","Data":"56a0e4e4ff839ae5900c84d69890f793ddb3bd84538df59669303c0bf98050f3"} Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.065373 4961 scope.go:117] "RemoveContainer" containerID="af8e46afa0972933bb17315bc7de0592287240851f5178b7533fd918320062ab" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.065767 4961 scope.go:117] "RemoveContainer" containerID="56a0e4e4ff839ae5900c84d69890f793ddb3bd84538df59669303c0bf98050f3" Dec 05 17:43:34 crc kubenswrapper[4961]: E1205 17:43:34.065972 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-bgtgs_openshift-multus(26618630-1782-4ae8-af12-6f913fbddf5b)\"" pod="openshift-multus/multus-bgtgs" podUID="26618630-1782-4ae8-af12-6f913fbddf5b" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.078298 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5w9vd_f64daea3-7a90-4012-bd0c-31b137bd1cae/ovnkube-controller/3.log" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.084385 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5w9vd_f64daea3-7a90-4012-bd0c-31b137bd1cae/ovn-acl-logging/0.log" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.084746 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5w9vd_f64daea3-7a90-4012-bd0c-31b137bd1cae/ovn-controller/0.log" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.088050 4961 generic.go:334] "Generic (PLEG): container finished" podID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerID="2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765" exitCode=0 Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.088133 4961 generic.go:334] "Generic (PLEG): container finished" podID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerID="531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd" exitCode=0 Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.088143 4961 generic.go:334] "Generic (PLEG): container finished" podID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerID="4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93" exitCode=0 Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.088153 4961 generic.go:334] "Generic (PLEG): container finished" podID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerID="9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415" exitCode=143 Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.088162 4961 generic.go:334] "Generic (PLEG): container finished" podID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerID="32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8" exitCode=143 Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 
17:43:34.088185 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerDied","Data":"2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765"} Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.088213 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerDied","Data":"531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd"} Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.088226 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerDied","Data":"4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93"} Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.088238 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerDied","Data":"9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415"} Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.088249 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerDied","Data":"32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8"} Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.227260 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5w9vd_f64daea3-7a90-4012-bd0c-31b137bd1cae/ovnkube-controller/3.log" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.229463 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5w9vd_f64daea3-7a90-4012-bd0c-31b137bd1cae/ovn-acl-logging/0.log" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.229952 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5w9vd_f64daea3-7a90-4012-bd0c-31b137bd1cae/ovn-controller/0.log" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.230474 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.294267 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-qwr6r"] Dec 05 17:43:34 crc kubenswrapper[4961]: E1205 17:43:34.294598 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.294628 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 17:43:34 crc kubenswrapper[4961]: E1205 17:43:34.294648 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="ovnkube-controller" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.294665 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="ovnkube-controller" Dec 05 17:43:34 crc kubenswrapper[4961]: E1205 17:43:34.294685 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="ovnkube-controller" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.294699 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="ovnkube-controller" Dec 05 17:43:34 crc kubenswrapper[4961]: E1205 17:43:34.294715 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="nbdb" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.294728 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="nbdb" Dec 05 17:43:34 crc kubenswrapper[4961]: E1205 17:43:34.294748 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="kube-rbac-proxy-node" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.294761 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="kube-rbac-proxy-node" Dec 05 17:43:34 crc kubenswrapper[4961]: E1205 17:43:34.294807 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="ovn-controller" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.294821 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="ovn-controller" Dec 05 17:43:34 crc kubenswrapper[4961]: E1205 17:43:34.294842 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="ovn-acl-logging" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.294856 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="ovn-acl-logging" Dec 05 17:43:34 crc kubenswrapper[4961]: E1205 17:43:34.294872 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="ovnkube-controller" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.294886 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="ovnkube-controller" Dec 05 17:43:34 crc kubenswrapper[4961]: E1205 17:43:34.294903 4961 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="northd" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.294916 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="northd" Dec 05 17:43:34 crc kubenswrapper[4961]: E1205 17:43:34.294932 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="kubecfg-setup" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.294945 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="kubecfg-setup" Dec 05 17:43:34 crc kubenswrapper[4961]: E1205 17:43:34.294965 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="sbdb" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.294977 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="sbdb" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.295293 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="ovnkube-controller" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.295361 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="ovn-controller" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.295384 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="ovnkube-controller" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.295400 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="ovn-acl-logging" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.295463 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="kube-rbac-proxy-ovn-metrics" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.295483 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="ovnkube-controller" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.295498 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="nbdb" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.295558 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="sbdb" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.295577 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="ovnkube-controller" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.295640 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="kube-rbac-proxy-node" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.295656 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="northd" Dec 05 17:43:34 crc kubenswrapper[4961]: E1205 17:43:34.296160 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="ovnkube-controller" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.296317 4961 
state_mem.go:107] "Deleted CPUSet assignment" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="ovnkube-controller" Dec 05 17:43:34 crc kubenswrapper[4961]: E1205 17:43:34.296352 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="ovnkube-controller" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.296418 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="ovnkube-controller" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.298064 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerName="ovnkube-controller" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.300422 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.315410 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-run-ovn\") pod \"f64daea3-7a90-4012-bd0c-31b137bd1cae\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.315459 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "f64daea3-7a90-4012-bd0c-31b137bd1cae" (UID: "f64daea3-7a90-4012-bd0c-31b137bd1cae"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.315546 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-run-openvswitch\") pod \"f64daea3-7a90-4012-bd0c-31b137bd1cae\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.315564 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "f64daea3-7a90-4012-bd0c-31b137bd1cae" (UID: "f64daea3-7a90-4012-bd0c-31b137bd1cae"). InnerVolumeSpecName "run-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.315591 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f64daea3-7a90-4012-bd0c-31b137bd1cae-ovnkube-script-lib\") pod \"f64daea3-7a90-4012-bd0c-31b137bd1cae\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.315671 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-run-systemd\") pod \"f64daea3-7a90-4012-bd0c-31b137bd1cae\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.315910 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-var-lib-cni-networks-ovn-kubernetes\") pod \"f64daea3-7a90-4012-bd0c-31b137bd1cae\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.316053 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f64daea3-7a90-4012-bd0c-31b137bd1cae-env-overrides\") pod \"f64daea3-7a90-4012-bd0c-31b137bd1cae\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.315957 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f64daea3-7a90-4012-bd0c-31b137bd1cae-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "f64daea3-7a90-4012-bd0c-31b137bd1cae" (UID: "f64daea3-7a90-4012-bd0c-31b137bd1cae"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.316047 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "f64daea3-7a90-4012-bd0c-31b137bd1cae" (UID: "f64daea3-7a90-4012-bd0c-31b137bd1cae"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.316126 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-systemd-units\") pod \"f64daea3-7a90-4012-bd0c-31b137bd1cae\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.316261 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "f64daea3-7a90-4012-bd0c-31b137bd1cae" (UID: "f64daea3-7a90-4012-bd0c-31b137bd1cae"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.316325 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-slash" (OuterVolumeSpecName: "host-slash") pod "f64daea3-7a90-4012-bd0c-31b137bd1cae" (UID: "f64daea3-7a90-4012-bd0c-31b137bd1cae"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.316285 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-slash\") pod \"f64daea3-7a90-4012-bd0c-31b137bd1cae\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.316518 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-log-socket\") pod \"f64daea3-7a90-4012-bd0c-31b137bd1cae\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.316607 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-run-netns\") pod \"f64daea3-7a90-4012-bd0c-31b137bd1cae\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.316523 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f64daea3-7a90-4012-bd0c-31b137bd1cae-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "f64daea3-7a90-4012-bd0c-31b137bd1cae" (UID: "f64daea3-7a90-4012-bd0c-31b137bd1cae"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.316571 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-log-socket" (OuterVolumeSpecName: "log-socket") pod "f64daea3-7a90-4012-bd0c-31b137bd1cae" (UID: "f64daea3-7a90-4012-bd0c-31b137bd1cae"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.316662 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "f64daea3-7a90-4012-bd0c-31b137bd1cae" (UID: "f64daea3-7a90-4012-bd0c-31b137bd1cae"). InnerVolumeSpecName "host-run-netns". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.316677 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-run-ovn-kubernetes\") pod \"f64daea3-7a90-4012-bd0c-31b137bd1cae\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.316803 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-node-log\") pod \"f64daea3-7a90-4012-bd0c-31b137bd1cae\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.316840 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-cni-bin\") pod \"f64daea3-7a90-4012-bd0c-31b137bd1cae\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.316875 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f64daea3-7a90-4012-bd0c-31b137bd1cae-ovnkube-config\") pod \"f64daea3-7a90-4012-bd0c-31b137bd1cae\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.316882 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-node-log" (OuterVolumeSpecName: "node-log") pod "f64daea3-7a90-4012-bd0c-31b137bd1cae" (UID: "f64daea3-7a90-4012-bd0c-31b137bd1cae"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.316922 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "f64daea3-7a90-4012-bd0c-31b137bd1cae" (UID: "f64daea3-7a90-4012-bd0c-31b137bd1cae"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.316926 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-var-lib-openvswitch\") pod \"f64daea3-7a90-4012-bd0c-31b137bd1cae\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.316959 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "f64daea3-7a90-4012-bd0c-31b137bd1cae" (UID: "f64daea3-7a90-4012-bd0c-31b137bd1cae"). InnerVolumeSpecName "var-lib-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.316990 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-cni-netd\") pod \"f64daea3-7a90-4012-bd0c-31b137bd1cae\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.317025 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-kubelet\") pod \"f64daea3-7a90-4012-bd0c-31b137bd1cae\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.317061 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f64daea3-7a90-4012-bd0c-31b137bd1cae-ovn-node-metrics-cert\") pod \"f64daea3-7a90-4012-bd0c-31b137bd1cae\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.317075 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "f64daea3-7a90-4012-bd0c-31b137bd1cae" (UID: "f64daea3-7a90-4012-bd0c-31b137bd1cae"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.317084 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pr468\" (UniqueName: \"kubernetes.io/projected/f64daea3-7a90-4012-bd0c-31b137bd1cae-kube-api-access-pr468\") pod \"f64daea3-7a90-4012-bd0c-31b137bd1cae\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.317185 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "f64daea3-7a90-4012-bd0c-31b137bd1cae" (UID: "f64daea3-7a90-4012-bd0c-31b137bd1cae"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.317194 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "f64daea3-7a90-4012-bd0c-31b137bd1cae" (UID: "f64daea3-7a90-4012-bd0c-31b137bd1cae"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.317301 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f64daea3-7a90-4012-bd0c-31b137bd1cae-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "f64daea3-7a90-4012-bd0c-31b137bd1cae" (UID: "f64daea3-7a90-4012-bd0c-31b137bd1cae"). InnerVolumeSpecName "ovnkube-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.317390 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-log-socket\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.317459 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-run-ovn\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.317533 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hqwl\" (UniqueName: \"kubernetes.io/projected/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-kube-api-access-6hqwl\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.317597 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-run-openvswitch\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.317673 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-var-lib-openvswitch\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.317800 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-ovnkube-script-lib\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.317864 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-node-log\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.317943 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-host-run-netns\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.318014 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-host-cni-netd\") 
pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.318142 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-run-systemd\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.318212 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-host-kubelet\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.318317 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-host-slash\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.318414 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-systemd-units\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.318517 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-host-cni-bin\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.318638 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-etc-openvswitch\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.318745 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-host-run-ovn-kubernetes\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.318862 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-env-overrides\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.318945 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" 
(UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.319033 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-ovnkube-config\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.319093 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-ovn-node-metrics-cert\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.319173 4961 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-run-netns\") on node \"crc\" DevicePath \"\"" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.319231 4961 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.319281 4961 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-node-log\") on node \"crc\" DevicePath \"\"" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.319337 4961 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-cni-bin\") on node \"crc\" DevicePath \"\"" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.319386 4961 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/f64daea3-7a90-4012-bd0c-31b137bd1cae-ovnkube-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.319437 4961 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.319489 4961 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-cni-netd\") on node \"crc\" DevicePath \"\"" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.319537 4961 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-kubelet\") on node \"crc\" DevicePath \"\"" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.319584 4961 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.319635 4961 
reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-run-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.319688 4961 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/f64daea3-7a90-4012-bd0c-31b137bd1cae-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.319739 4961 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.319811 4961 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/f64daea3-7a90-4012-bd0c-31b137bd1cae-env-overrides\") on node \"crc\" DevicePath \"\"" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.319874 4961 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-systemd-units\") on node \"crc\" DevicePath \"\"" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.319926 4961 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-host-slash\") on node \"crc\" DevicePath \"\"" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.319985 4961 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-log-socket\") on node \"crc\" DevicePath \"\"" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.322282 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f64daea3-7a90-4012-bd0c-31b137bd1cae-kube-api-access-pr468" (OuterVolumeSpecName: "kube-api-access-pr468") pod "f64daea3-7a90-4012-bd0c-31b137bd1cae" (UID: "f64daea3-7a90-4012-bd0c-31b137bd1cae"). InnerVolumeSpecName "kube-api-access-pr468". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.322305 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f64daea3-7a90-4012-bd0c-31b137bd1cae-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "f64daea3-7a90-4012-bd0c-31b137bd1cae" (UID: "f64daea3-7a90-4012-bd0c-31b137bd1cae"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.334020 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "f64daea3-7a90-4012-bd0c-31b137bd1cae" (UID: "f64daea3-7a90-4012-bd0c-31b137bd1cae"). InnerVolumeSpecName "run-systemd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421036 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-etc-openvswitch\") pod \"f64daea3-7a90-4012-bd0c-31b137bd1cae\" (UID: \"f64daea3-7a90-4012-bd0c-31b137bd1cae\") " Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421169 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "f64daea3-7a90-4012-bd0c-31b137bd1cae" (UID: "f64daea3-7a90-4012-bd0c-31b137bd1cae"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421286 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-var-lib-openvswitch\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421315 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-ovnkube-script-lib\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421336 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-node-log\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421392 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-host-run-netns\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421400 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-var-lib-openvswitch\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421445 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-host-cni-netd\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421457 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-node-log\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc 
kubenswrapper[4961]: I1205 17:43:34.421470 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-run-systemd\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421486 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-host-kubelet\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421498 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-host-run-netns\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421503 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-host-slash\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421537 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-run-systemd\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421544 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-systemd-units\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421568 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-host-cni-bin\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421575 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-host-cni-netd\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421614 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-host-slash\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421640 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: 
\"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-etc-openvswitch\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421649 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-host-kubelet\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421678 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-host-cni-bin\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421707 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-systemd-units\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421713 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-host-run-ovn-kubernetes\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421736 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-etc-openvswitch\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421742 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-env-overrides\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421761 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421811 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-ovnkube-config\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421766 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: 
\"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-host-run-ovn-kubernetes\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421827 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-ovn-node-metrics-cert\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421890 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-log-socket\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421919 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-run-ovn\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421944 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hqwl\" (UniqueName: \"kubernetes.io/projected/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-kube-api-access-6hqwl\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.421965 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-run-openvswitch\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.422012 4961 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/f64daea3-7a90-4012-bd0c-31b137bd1cae-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.422027 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pr468\" (UniqueName: \"kubernetes.io/projected/f64daea3-7a90-4012-bd0c-31b137bd1cae-kube-api-access-pr468\") on node \"crc\" DevicePath \"\"" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.422039 4961 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.422052 4961 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/f64daea3-7a90-4012-bd0c-31b137bd1cae-run-systemd\") on node \"crc\" DevicePath \"\"" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.422059 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-ovnkube-script-lib\") pod \"ovnkube-node-qwr6r\" (UID: 
\"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.422084 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-run-openvswitch\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.422115 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-log-socket\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.422116 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.422157 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-run-ovn\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.422394 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-env-overrides\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.422817 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-ovnkube-config\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.425441 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-ovn-node-metrics-cert\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.441736 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6hqwl\" (UniqueName: \"kubernetes.io/projected/bddbbbec-5680-4f62-ab99-b67eaaf29f8f-kube-api-access-6hqwl\") pod \"ovnkube-node-qwr6r\" (UID: \"bddbbbec-5680-4f62-ab99-b67eaaf29f8f\") " pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.455933 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="cert-manager/cert-manager-webhook-5655c58dd6-xzk6r" Dec 05 17:43:34 crc kubenswrapper[4961]: I1205 17:43:34.615457 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.095198 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-bgtgs_26618630-1782-4ae8-af12-6f913fbddf5b/kube-multus/2.log" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.098736 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5w9vd_f64daea3-7a90-4012-bd0c-31b137bd1cae/ovnkube-controller/3.log" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.101803 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5w9vd_f64daea3-7a90-4012-bd0c-31b137bd1cae/ovn-acl-logging/0.log" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.102486 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-5w9vd_f64daea3-7a90-4012-bd0c-31b137bd1cae/ovn-controller/0.log" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.102855 4961 generic.go:334] "Generic (PLEG): container finished" podID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerID="11daeee141f685444018fda0cd1d816c8fd3cb2424c7e936f290b30fcd16f3d9" exitCode=0 Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.102886 4961 generic.go:334] "Generic (PLEG): container finished" podID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerID="072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e" exitCode=0 Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.102897 4961 generic.go:334] "Generic (PLEG): container finished" podID="f64daea3-7a90-4012-bd0c-31b137bd1cae" containerID="233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26" exitCode=0 Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.102949 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerDied","Data":"11daeee141f685444018fda0cd1d816c8fd3cb2424c7e936f290b30fcd16f3d9"} Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.102982 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerDied","Data":"072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e"} Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.102998 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerDied","Data":"233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26"} Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.103011 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" event={"ID":"f64daea3-7a90-4012-bd0c-31b137bd1cae","Type":"ContainerDied","Data":"97c7c8cdb21ee1591d4a4b7780f671fae8cf19a9f096255dcc67f67ca9f56e1b"} Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.103030 4961 scope.go:117] "RemoveContainer" containerID="11daeee141f685444018fda0cd1d816c8fd3cb2424c7e936f290b30fcd16f3d9" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.103264 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-5w9vd" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.106614 4961 generic.go:334] "Generic (PLEG): container finished" podID="bddbbbec-5680-4f62-ab99-b67eaaf29f8f" containerID="415188f885e3ab7f67bae45415454cbcc0137f2c6f82081248ad42ffd50520db" exitCode=0 Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.106651 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" event={"ID":"bddbbbec-5680-4f62-ab99-b67eaaf29f8f","Type":"ContainerDied","Data":"415188f885e3ab7f67bae45415454cbcc0137f2c6f82081248ad42ffd50520db"} Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.106675 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" event={"ID":"bddbbbec-5680-4f62-ab99-b67eaaf29f8f","Type":"ContainerStarted","Data":"0c67b7d02e91f4032d5bab8fb40f66ba1741cbf8b7b475ee076bf6b624686411"} Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.125643 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5w9vd"] Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.129431 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-5w9vd"] Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.133173 4961 scope.go:117] "RemoveContainer" containerID="8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.166926 4961 scope.go:117] "RemoveContainer" containerID="072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.187274 4961 scope.go:117] "RemoveContainer" containerID="2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.208485 4961 scope.go:117] "RemoveContainer" containerID="233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.220689 4961 scope.go:117] "RemoveContainer" containerID="531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.233866 4961 scope.go:117] "RemoveContainer" containerID="4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.250513 4961 scope.go:117] "RemoveContainer" containerID="9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.273838 4961 scope.go:117] "RemoveContainer" containerID="32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.308422 4961 scope.go:117] "RemoveContainer" containerID="b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.335733 4961 scope.go:117] "RemoveContainer" containerID="11daeee141f685444018fda0cd1d816c8fd3cb2424c7e936f290b30fcd16f3d9" Dec 05 17:43:35 crc kubenswrapper[4961]: E1205 17:43:35.336330 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"11daeee141f685444018fda0cd1d816c8fd3cb2424c7e936f290b30fcd16f3d9\": container with ID starting with 11daeee141f685444018fda0cd1d816c8fd3cb2424c7e936f290b30fcd16f3d9 not found: ID does not exist" 
containerID="11daeee141f685444018fda0cd1d816c8fd3cb2424c7e936f290b30fcd16f3d9" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.336382 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11daeee141f685444018fda0cd1d816c8fd3cb2424c7e936f290b30fcd16f3d9"} err="failed to get container status \"11daeee141f685444018fda0cd1d816c8fd3cb2424c7e936f290b30fcd16f3d9\": rpc error: code = NotFound desc = could not find container \"11daeee141f685444018fda0cd1d816c8fd3cb2424c7e936f290b30fcd16f3d9\": container with ID starting with 11daeee141f685444018fda0cd1d816c8fd3cb2424c7e936f290b30fcd16f3d9 not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.336418 4961 scope.go:117] "RemoveContainer" containerID="8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02" Dec 05 17:43:35 crc kubenswrapper[4961]: E1205 17:43:35.336899 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02\": container with ID starting with 8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02 not found: ID does not exist" containerID="8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.336925 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02"} err="failed to get container status \"8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02\": rpc error: code = NotFound desc = could not find container \"8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02\": container with ID starting with 8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02 not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.336947 4961 scope.go:117] "RemoveContainer" containerID="072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e" Dec 05 17:43:35 crc kubenswrapper[4961]: E1205 17:43:35.337249 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\": container with ID starting with 072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e not found: ID does not exist" containerID="072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.337305 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e"} err="failed to get container status \"072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\": rpc error: code = NotFound desc = could not find container \"072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\": container with ID starting with 072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.337339 4961 scope.go:117] "RemoveContainer" containerID="2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765" Dec 05 17:43:35 crc kubenswrapper[4961]: E1205 17:43:35.337649 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\": container with ID starting with 2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765 not found: ID does not exist" containerID="2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.337677 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765"} err="failed to get container status \"2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\": rpc error: code = NotFound desc = could not find container \"2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\": container with ID starting with 2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765 not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.337695 4961 scope.go:117] "RemoveContainer" containerID="233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26" Dec 05 17:43:35 crc kubenswrapper[4961]: E1205 17:43:35.338026 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\": container with ID starting with 233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26 not found: ID does not exist" containerID="233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.338046 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26"} err="failed to get container status \"233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\": rpc error: code = NotFound desc = could not find container \"233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\": container with ID starting with 233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26 not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.338060 4961 scope.go:117] "RemoveContainer" containerID="531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd" Dec 05 17:43:35 crc kubenswrapper[4961]: E1205 17:43:35.338534 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\": container with ID starting with 531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd not found: ID does not exist" containerID="531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.338552 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd"} err="failed to get container status \"531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\": rpc error: code = NotFound desc = could not find container \"531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\": container with ID starting with 531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.338566 4961 scope.go:117] "RemoveContainer" containerID="4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93" Dec 05 17:43:35 crc 
kubenswrapper[4961]: E1205 17:43:35.338960 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\": container with ID starting with 4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93 not found: ID does not exist" containerID="4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.338989 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93"} err="failed to get container status \"4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\": rpc error: code = NotFound desc = could not find container \"4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\": container with ID starting with 4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93 not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.339001 4961 scope.go:117] "RemoveContainer" containerID="9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415" Dec 05 17:43:35 crc kubenswrapper[4961]: E1205 17:43:35.339306 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\": container with ID starting with 9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415 not found: ID does not exist" containerID="9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.339336 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415"} err="failed to get container status \"9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\": rpc error: code = NotFound desc = could not find container \"9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\": container with ID starting with 9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415 not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.339354 4961 scope.go:117] "RemoveContainer" containerID="32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8" Dec 05 17:43:35 crc kubenswrapper[4961]: E1205 17:43:35.339682 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\": container with ID starting with 32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8 not found: ID does not exist" containerID="32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.339713 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8"} err="failed to get container status \"32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\": rpc error: code = NotFound desc = could not find container \"32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\": container with ID starting with 32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8 not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: 
I1205 17:43:35.339734 4961 scope.go:117] "RemoveContainer" containerID="b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca" Dec 05 17:43:35 crc kubenswrapper[4961]: E1205 17:43:35.340010 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\": container with ID starting with b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca not found: ID does not exist" containerID="b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.340036 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca"} err="failed to get container status \"b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\": rpc error: code = NotFound desc = could not find container \"b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\": container with ID starting with b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.340050 4961 scope.go:117] "RemoveContainer" containerID="11daeee141f685444018fda0cd1d816c8fd3cb2424c7e936f290b30fcd16f3d9" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.340230 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11daeee141f685444018fda0cd1d816c8fd3cb2424c7e936f290b30fcd16f3d9"} err="failed to get container status \"11daeee141f685444018fda0cd1d816c8fd3cb2424c7e936f290b30fcd16f3d9\": rpc error: code = NotFound desc = could not find container \"11daeee141f685444018fda0cd1d816c8fd3cb2424c7e936f290b30fcd16f3d9\": container with ID starting with 11daeee141f685444018fda0cd1d816c8fd3cb2424c7e936f290b30fcd16f3d9 not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.340260 4961 scope.go:117] "RemoveContainer" containerID="8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.340558 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02"} err="failed to get container status \"8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02\": rpc error: code = NotFound desc = could not find container \"8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02\": container with ID starting with 8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02 not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.340578 4961 scope.go:117] "RemoveContainer" containerID="072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.340903 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e"} err="failed to get container status \"072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\": rpc error: code = NotFound desc = could not find container \"072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\": container with ID starting with 072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: 
I1205 17:43:35.340934 4961 scope.go:117] "RemoveContainer" containerID="2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.341204 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765"} err="failed to get container status \"2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\": rpc error: code = NotFound desc = could not find container \"2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\": container with ID starting with 2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765 not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.341223 4961 scope.go:117] "RemoveContainer" containerID="233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.344497 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26"} err="failed to get container status \"233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\": rpc error: code = NotFound desc = could not find container \"233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\": container with ID starting with 233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26 not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.344550 4961 scope.go:117] "RemoveContainer" containerID="531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.345314 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd"} err="failed to get container status \"531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\": rpc error: code = NotFound desc = could not find container \"531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\": container with ID starting with 531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.345347 4961 scope.go:117] "RemoveContainer" containerID="4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.345551 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93"} err="failed to get container status \"4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\": rpc error: code = NotFound desc = could not find container \"4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\": container with ID starting with 4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93 not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.345574 4961 scope.go:117] "RemoveContainer" containerID="9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.345740 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415"} err="failed to get container status 
\"9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\": rpc error: code = NotFound desc = could not find container \"9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\": container with ID starting with 9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415 not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.345759 4961 scope.go:117] "RemoveContainer" containerID="32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.345944 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8"} err="failed to get container status \"32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\": rpc error: code = NotFound desc = could not find container \"32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\": container with ID starting with 32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8 not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.345959 4961 scope.go:117] "RemoveContainer" containerID="b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.346117 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca"} err="failed to get container status \"b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\": rpc error: code = NotFound desc = could not find container \"b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\": container with ID starting with b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.346140 4961 scope.go:117] "RemoveContainer" containerID="11daeee141f685444018fda0cd1d816c8fd3cb2424c7e936f290b30fcd16f3d9" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.346313 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"11daeee141f685444018fda0cd1d816c8fd3cb2424c7e936f290b30fcd16f3d9"} err="failed to get container status \"11daeee141f685444018fda0cd1d816c8fd3cb2424c7e936f290b30fcd16f3d9\": rpc error: code = NotFound desc = could not find container \"11daeee141f685444018fda0cd1d816c8fd3cb2424c7e936f290b30fcd16f3d9\": container with ID starting with 11daeee141f685444018fda0cd1d816c8fd3cb2424c7e936f290b30fcd16f3d9 not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.346564 4961 scope.go:117] "RemoveContainer" containerID="8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.346734 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02"} err="failed to get container status \"8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02\": rpc error: code = NotFound desc = could not find container \"8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02\": container with ID starting with 8fea482733ff2ed68d4f7cc98c24060e76cf19161448d74b37d39888ee79ce02 not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.346749 4961 scope.go:117] "RemoveContainer" 
containerID="072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.346942 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e"} err="failed to get container status \"072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\": rpc error: code = NotFound desc = could not find container \"072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e\": container with ID starting with 072955851aaa0ca202df5c2cb6d399cd004719745d675ca669b89c64e88e309e not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.346957 4961 scope.go:117] "RemoveContainer" containerID="2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.347121 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765"} err="failed to get container status \"2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\": rpc error: code = NotFound desc = could not find container \"2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765\": container with ID starting with 2fbf10d0a5ad90696044d54456f3195973264c9b5d221b74a48b20c3d8362765 not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.347138 4961 scope.go:117] "RemoveContainer" containerID="233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.347300 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26"} err="failed to get container status \"233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\": rpc error: code = NotFound desc = could not find container \"233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26\": container with ID starting with 233ac65f7143421d3d03e84883438faa23d037d74a79fcd1a7d9569e088a0e26 not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.347317 4961 scope.go:117] "RemoveContainer" containerID="531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.347473 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd"} err="failed to get container status \"531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\": rpc error: code = NotFound desc = could not find container \"531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd\": container with ID starting with 531d042685bd7efbc2998bbf0b0499c7e24dd33f19bfc1a5f15aed0c78e73cbd not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.347489 4961 scope.go:117] "RemoveContainer" containerID="4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.347652 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93"} err="failed to get container status \"4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\": rpc error: code = NotFound desc = could not find 
container \"4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93\": container with ID starting with 4e9b57366e76f3351200a753c83dfac6c9030e50677e51654d3b4d0afcf6cc93 not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.347669 4961 scope.go:117] "RemoveContainer" containerID="9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.349359 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415"} err="failed to get container status \"9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\": rpc error: code = NotFound desc = could not find container \"9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415\": container with ID starting with 9b97fc5ebb07c6be0e4cf3796d692958edd95cecb53f5362cc98e0f12f369415 not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.349406 4961 scope.go:117] "RemoveContainer" containerID="32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.350913 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8"} err="failed to get container status \"32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\": rpc error: code = NotFound desc = could not find container \"32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8\": container with ID starting with 32aba0cf48966ec0ac80dfaa8dbd28fd6665398ae6808371d539e18b8aab4ee8 not found: ID does not exist" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.350935 4961 scope.go:117] "RemoveContainer" containerID="b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca" Dec 05 17:43:35 crc kubenswrapper[4961]: I1205 17:43:35.351456 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca"} err="failed to get container status \"b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\": rpc error: code = NotFound desc = could not find container \"b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca\": container with ID starting with b06cf58739e9ecf3953c93f872ffcdd3525e53cac4a6be60d6166fa93ea2e5ca not found: ID does not exist" Dec 05 17:43:36 crc kubenswrapper[4961]: I1205 17:43:36.118384 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" event={"ID":"bddbbbec-5680-4f62-ab99-b67eaaf29f8f","Type":"ContainerStarted","Data":"58f051d34d9e61a930472a757b7ad43e00019480dc6fbc3160ff39ecf4cac6cb"} Dec 05 17:43:36 crc kubenswrapper[4961]: I1205 17:43:36.118423 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" event={"ID":"bddbbbec-5680-4f62-ab99-b67eaaf29f8f","Type":"ContainerStarted","Data":"fc8ed4e6faf586e5ac1ec4ab5d811d09e8d4b6168c734543e4ed1afade849772"} Dec 05 17:43:36 crc kubenswrapper[4961]: I1205 17:43:36.118434 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" event={"ID":"bddbbbec-5680-4f62-ab99-b67eaaf29f8f","Type":"ContainerStarted","Data":"5c349df942a40cad21f85908151731c5d4a3e95764eba5eb6b4e017673f303f7"} Dec 05 17:43:36 crc kubenswrapper[4961]: I1205 17:43:36.118442 4961 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" event={"ID":"bddbbbec-5680-4f62-ab99-b67eaaf29f8f","Type":"ContainerStarted","Data":"1653fceb597f511257a48d4afc5e293491159f5ada42c6d534234e1ed614b375"} Dec 05 17:43:36 crc kubenswrapper[4961]: I1205 17:43:36.118454 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" event={"ID":"bddbbbec-5680-4f62-ab99-b67eaaf29f8f","Type":"ContainerStarted","Data":"be9bc63b092d7b761fefbc6165943b59347364e9ae39b2623ac5d78dd34e3351"} Dec 05 17:43:36 crc kubenswrapper[4961]: I1205 17:43:36.118462 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" event={"ID":"bddbbbec-5680-4f62-ab99-b67eaaf29f8f","Type":"ContainerStarted","Data":"120ea2f5f3b3f72cc8848956c4b58057dbbd63e02c673dd95a8f818b83711a88"} Dec 05 17:43:36 crc kubenswrapper[4961]: I1205 17:43:36.880807 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f64daea3-7a90-4012-bd0c-31b137bd1cae" path="/var/lib/kubelet/pods/f64daea3-7a90-4012-bd0c-31b137bd1cae/volumes" Dec 05 17:43:39 crc kubenswrapper[4961]: I1205 17:43:39.138858 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" event={"ID":"bddbbbec-5680-4f62-ab99-b67eaaf29f8f","Type":"ContainerStarted","Data":"825b3d6a84c5c0261ebdf56c18e5ce90ad7717dcc09f1ef28721daeed98151dc"} Dec 05 17:43:41 crc kubenswrapper[4961]: I1205 17:43:41.152713 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" event={"ID":"bddbbbec-5680-4f62-ab99-b67eaaf29f8f","Type":"ContainerStarted","Data":"5922e18d74bfe5e9fd51349a1d3772eafa61ece839054f13e56d713e3d7685f3"} Dec 05 17:43:41 crc kubenswrapper[4961]: I1205 17:43:41.153103 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:41 crc kubenswrapper[4961]: I1205 17:43:41.194497 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:41 crc kubenswrapper[4961]: I1205 17:43:41.195931 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" podStartSLOduration=7.195911245 podStartE2EDuration="7.195911245s" podCreationTimestamp="2025-12-05 17:43:34 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:43:41.192700347 +0000 UTC m=+627.253850830" watchObservedRunningTime="2025-12-05 17:43:41.195911245 +0000 UTC m=+627.257061718" Dec 05 17:43:42 crc kubenswrapper[4961]: I1205 17:43:42.158672 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:42 crc kubenswrapper[4961]: I1205 17:43:42.158716 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:42 crc kubenswrapper[4961]: I1205 17:43:42.182530 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:43:45 crc kubenswrapper[4961]: I1205 17:43:45.865123 4961 scope.go:117] "RemoveContainer" containerID="56a0e4e4ff839ae5900c84d69890f793ddb3bd84538df59669303c0bf98050f3" Dec 05 17:43:45 crc kubenswrapper[4961]: E1205 17:43:45.865868 4961 pod_workers.go:1301] "Error 
syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-bgtgs_openshift-multus(26618630-1782-4ae8-af12-6f913fbddf5b)\"" pod="openshift-multus/multus-bgtgs" podUID="26618630-1782-4ae8-af12-6f913fbddf5b" Dec 05 17:43:57 crc kubenswrapper[4961]: I1205 17:43:57.245533 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:43:57 crc kubenswrapper[4961]: I1205 17:43:57.246953 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:43:57 crc kubenswrapper[4961]: I1205 17:43:57.247062 4961 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" Dec 05 17:43:57 crc kubenswrapper[4961]: I1205 17:43:57.247954 4961 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"186f0ffa4d8d4244121fc3f97d5899df054ba290ecdaef527b5b09c64012516b"} pod="openshift-machine-config-operator/machine-config-daemon-4vc27" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 17:43:57 crc kubenswrapper[4961]: I1205 17:43:57.248059 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" containerID="cri-o://186f0ffa4d8d4244121fc3f97d5899df054ba290ecdaef527b5b09c64012516b" gracePeriod=600 Dec 05 17:43:58 crc kubenswrapper[4961]: I1205 17:43:58.248196 4961 generic.go:334] "Generic (PLEG): container finished" podID="c048c267-061b-479b-9d63-b3aee093d9f6" containerID="186f0ffa4d8d4244121fc3f97d5899df054ba290ecdaef527b5b09c64012516b" exitCode=0 Dec 05 17:43:58 crc kubenswrapper[4961]: I1205 17:43:58.248511 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerDied","Data":"186f0ffa4d8d4244121fc3f97d5899df054ba290ecdaef527b5b09c64012516b"} Dec 05 17:43:58 crc kubenswrapper[4961]: I1205 17:43:58.248542 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerStarted","Data":"8298295ec17dfc63ef58bd3072d80116c1dcc72b110ce7d9f28c0734b811e20a"} Dec 05 17:43:58 crc kubenswrapper[4961]: I1205 17:43:58.248560 4961 scope.go:117] "RemoveContainer" containerID="ca99d8e70567623b8c5ad00a1a0c15543d5fe98ccba8ac7da75e4ab98c660364" Dec 05 17:43:58 crc kubenswrapper[4961]: I1205 17:43:58.864348 4961 scope.go:117] "RemoveContainer" containerID="56a0e4e4ff839ae5900c84d69890f793ddb3bd84538df59669303c0bf98050f3" Dec 05 17:43:59 crc kubenswrapper[4961]: I1205 17:43:59.257852 4961 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-multus_multus-bgtgs_26618630-1782-4ae8-af12-6f913fbddf5b/kube-multus/2.log" Dec 05 17:43:59 crc kubenswrapper[4961]: I1205 17:43:59.258225 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-bgtgs" event={"ID":"26618630-1782-4ae8-af12-6f913fbddf5b","Type":"ContainerStarted","Data":"9416fbb6edb992473f6a1fface9ff42c6e990f5924e7787d4db99afadd114c70"} Dec 05 17:44:04 crc kubenswrapper[4961]: I1205 17:44:04.650207 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-qwr6r" Dec 05 17:44:13 crc kubenswrapper[4961]: I1205 17:44:13.688881 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5"] Dec 05 17:44:13 crc kubenswrapper[4961]: I1205 17:44:13.690847 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5" Dec 05 17:44:13 crc kubenswrapper[4961]: I1205 17:44:13.692531 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Dec 05 17:44:13 crc kubenswrapper[4961]: I1205 17:44:13.693119 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlrzt\" (UniqueName: \"kubernetes.io/projected/3dfe66b7-513e-4e53-854d-b1d0d9ca8acd-kube-api-access-mlrzt\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5\" (UID: \"3dfe66b7-513e-4e53-854d-b1d0d9ca8acd\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5" Dec 05 17:44:13 crc kubenswrapper[4961]: I1205 17:44:13.693218 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3dfe66b7-513e-4e53-854d-b1d0d9ca8acd-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5\" (UID: \"3dfe66b7-513e-4e53-854d-b1d0d9ca8acd\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5" Dec 05 17:44:13 crc kubenswrapper[4961]: I1205 17:44:13.693255 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3dfe66b7-513e-4e53-854d-b1d0d9ca8acd-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5\" (UID: \"3dfe66b7-513e-4e53-854d-b1d0d9ca8acd\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5" Dec 05 17:44:13 crc kubenswrapper[4961]: I1205 17:44:13.701371 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5"] Dec 05 17:44:13 crc kubenswrapper[4961]: I1205 17:44:13.794382 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3dfe66b7-513e-4e53-854d-b1d0d9ca8acd-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5\" (UID: \"3dfe66b7-513e-4e53-854d-b1d0d9ca8acd\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5" Dec 05 17:44:13 crc kubenswrapper[4961]: I1205 17:44:13.794747 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: 
\"kubernetes.io/empty-dir/3dfe66b7-513e-4e53-854d-b1d0d9ca8acd-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5\" (UID: \"3dfe66b7-513e-4e53-854d-b1d0d9ca8acd\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5" Dec 05 17:44:13 crc kubenswrapper[4961]: I1205 17:44:13.794856 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlrzt\" (UniqueName: \"kubernetes.io/projected/3dfe66b7-513e-4e53-854d-b1d0d9ca8acd-kube-api-access-mlrzt\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5\" (UID: \"3dfe66b7-513e-4e53-854d-b1d0d9ca8acd\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5" Dec 05 17:44:13 crc kubenswrapper[4961]: I1205 17:44:13.795026 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3dfe66b7-513e-4e53-854d-b1d0d9ca8acd-util\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5\" (UID: \"3dfe66b7-513e-4e53-854d-b1d0d9ca8acd\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5" Dec 05 17:44:13 crc kubenswrapper[4961]: I1205 17:44:13.795149 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3dfe66b7-513e-4e53-854d-b1d0d9ca8acd-bundle\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5\" (UID: \"3dfe66b7-513e-4e53-854d-b1d0d9ca8acd\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5" Dec 05 17:44:13 crc kubenswrapper[4961]: I1205 17:44:13.820248 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlrzt\" (UniqueName: \"kubernetes.io/projected/3dfe66b7-513e-4e53-854d-b1d0d9ca8acd-kube-api-access-mlrzt\") pod \"5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5\" (UID: \"3dfe66b7-513e-4e53-854d-b1d0d9ca8acd\") " pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5" Dec 05 17:44:14 crc kubenswrapper[4961]: I1205 17:44:14.014239 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5" Dec 05 17:44:14 crc kubenswrapper[4961]: I1205 17:44:14.291593 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5"] Dec 05 17:44:14 crc kubenswrapper[4961]: W1205 17:44:14.295333 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dfe66b7_513e_4e53_854d_b1d0d9ca8acd.slice/crio-37344d7e9f2e985005c040429b9b449f2d4517109cef0b00bb6054af0abc1502 WatchSource:0}: Error finding container 37344d7e9f2e985005c040429b9b449f2d4517109cef0b00bb6054af0abc1502: Status 404 returned error can't find the container with id 37344d7e9f2e985005c040429b9b449f2d4517109cef0b00bb6054af0abc1502 Dec 05 17:44:14 crc kubenswrapper[4961]: I1205 17:44:14.344983 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5" event={"ID":"3dfe66b7-513e-4e53-854d-b1d0d9ca8acd","Type":"ContainerStarted","Data":"37344d7e9f2e985005c040429b9b449f2d4517109cef0b00bb6054af0abc1502"} Dec 05 17:44:15 crc kubenswrapper[4961]: I1205 17:44:15.351961 4961 generic.go:334] "Generic (PLEG): container finished" podID="3dfe66b7-513e-4e53-854d-b1d0d9ca8acd" containerID="52856ebc867263a89cc2bb77f5c03722ec5f35dd74fc63ee57d46ed6cbff7c4a" exitCode=0 Dec 05 17:44:15 crc kubenswrapper[4961]: I1205 17:44:15.352045 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5" event={"ID":"3dfe66b7-513e-4e53-854d-b1d0d9ca8acd","Type":"ContainerDied","Data":"52856ebc867263a89cc2bb77f5c03722ec5f35dd74fc63ee57d46ed6cbff7c4a"} Dec 05 17:44:17 crc kubenswrapper[4961]: I1205 17:44:17.365748 4961 generic.go:334] "Generic (PLEG): container finished" podID="3dfe66b7-513e-4e53-854d-b1d0d9ca8acd" containerID="3bd73f055beb78fa784b8836eb2c8559b25a091ad45b86e5fb6536ee7facc0e7" exitCode=0 Dec 05 17:44:17 crc kubenswrapper[4961]: I1205 17:44:17.365931 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5" event={"ID":"3dfe66b7-513e-4e53-854d-b1d0d9ca8acd","Type":"ContainerDied","Data":"3bd73f055beb78fa784b8836eb2c8559b25a091ad45b86e5fb6536ee7facc0e7"} Dec 05 17:44:18 crc kubenswrapper[4961]: I1205 17:44:18.374650 4961 generic.go:334] "Generic (PLEG): container finished" podID="3dfe66b7-513e-4e53-854d-b1d0d9ca8acd" containerID="4becbbd312c2ee577fc28d22b9e21a15e574369ece0774e60fd11ab6931ac359" exitCode=0 Dec 05 17:44:18 crc kubenswrapper[4961]: I1205 17:44:18.374748 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5" event={"ID":"3dfe66b7-513e-4e53-854d-b1d0d9ca8acd","Type":"ContainerDied","Data":"4becbbd312c2ee577fc28d22b9e21a15e574369ece0774e60fd11ab6931ac359"} Dec 05 17:44:19 crc kubenswrapper[4961]: I1205 17:44:19.610250 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5" Dec 05 17:44:19 crc kubenswrapper[4961]: I1205 17:44:19.770518 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3dfe66b7-513e-4e53-854d-b1d0d9ca8acd-bundle\") pod \"3dfe66b7-513e-4e53-854d-b1d0d9ca8acd\" (UID: \"3dfe66b7-513e-4e53-854d-b1d0d9ca8acd\") " Dec 05 17:44:19 crc kubenswrapper[4961]: I1205 17:44:19.770619 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3dfe66b7-513e-4e53-854d-b1d0d9ca8acd-util\") pod \"3dfe66b7-513e-4e53-854d-b1d0d9ca8acd\" (UID: \"3dfe66b7-513e-4e53-854d-b1d0d9ca8acd\") " Dec 05 17:44:19 crc kubenswrapper[4961]: I1205 17:44:19.770762 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mlrzt\" (UniqueName: \"kubernetes.io/projected/3dfe66b7-513e-4e53-854d-b1d0d9ca8acd-kube-api-access-mlrzt\") pod \"3dfe66b7-513e-4e53-854d-b1d0d9ca8acd\" (UID: \"3dfe66b7-513e-4e53-854d-b1d0d9ca8acd\") " Dec 05 17:44:19 crc kubenswrapper[4961]: I1205 17:44:19.772583 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3dfe66b7-513e-4e53-854d-b1d0d9ca8acd-bundle" (OuterVolumeSpecName: "bundle") pod "3dfe66b7-513e-4e53-854d-b1d0d9ca8acd" (UID: "3dfe66b7-513e-4e53-854d-b1d0d9ca8acd"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:44:19 crc kubenswrapper[4961]: I1205 17:44:19.779320 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3dfe66b7-513e-4e53-854d-b1d0d9ca8acd-kube-api-access-mlrzt" (OuterVolumeSpecName: "kube-api-access-mlrzt") pod "3dfe66b7-513e-4e53-854d-b1d0d9ca8acd" (UID: "3dfe66b7-513e-4e53-854d-b1d0d9ca8acd"). InnerVolumeSpecName "kube-api-access-mlrzt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:44:19 crc kubenswrapper[4961]: I1205 17:44:19.786000 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3dfe66b7-513e-4e53-854d-b1d0d9ca8acd-util" (OuterVolumeSpecName: "util") pod "3dfe66b7-513e-4e53-854d-b1d0d9ca8acd" (UID: "3dfe66b7-513e-4e53-854d-b1d0d9ca8acd"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:44:19 crc kubenswrapper[4961]: I1205 17:44:19.871950 4961 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/3dfe66b7-513e-4e53-854d-b1d0d9ca8acd-util\") on node \"crc\" DevicePath \"\"" Dec 05 17:44:19 crc kubenswrapper[4961]: I1205 17:44:19.871988 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mlrzt\" (UniqueName: \"kubernetes.io/projected/3dfe66b7-513e-4e53-854d-b1d0d9ca8acd-kube-api-access-mlrzt\") on node \"crc\" DevicePath \"\"" Dec 05 17:44:19 crc kubenswrapper[4961]: I1205 17:44:19.872000 4961 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/3dfe66b7-513e-4e53-854d-b1d0d9ca8acd-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:44:20 crc kubenswrapper[4961]: I1205 17:44:20.390060 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5" event={"ID":"3dfe66b7-513e-4e53-854d-b1d0d9ca8acd","Type":"ContainerDied","Data":"37344d7e9f2e985005c040429b9b449f2d4517109cef0b00bb6054af0abc1502"} Dec 05 17:44:20 crc kubenswrapper[4961]: I1205 17:44:20.390104 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="37344d7e9f2e985005c040429b9b449f2d4517109cef0b00bb6054af0abc1502" Dec 05 17:44:20 crc kubenswrapper[4961]: I1205 17:44:20.390110 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5" Dec 05 17:44:22 crc kubenswrapper[4961]: I1205 17:44:22.213055 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-q2qxh"] Dec 05 17:44:22 crc kubenswrapper[4961]: E1205 17:44:22.213326 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dfe66b7-513e-4e53-854d-b1d0d9ca8acd" containerName="extract" Dec 05 17:44:22 crc kubenswrapper[4961]: I1205 17:44:22.213343 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dfe66b7-513e-4e53-854d-b1d0d9ca8acd" containerName="extract" Dec 05 17:44:22 crc kubenswrapper[4961]: E1205 17:44:22.213370 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dfe66b7-513e-4e53-854d-b1d0d9ca8acd" containerName="util" Dec 05 17:44:22 crc kubenswrapper[4961]: I1205 17:44:22.213378 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dfe66b7-513e-4e53-854d-b1d0d9ca8acd" containerName="util" Dec 05 17:44:22 crc kubenswrapper[4961]: E1205 17:44:22.213388 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3dfe66b7-513e-4e53-854d-b1d0d9ca8acd" containerName="pull" Dec 05 17:44:22 crc kubenswrapper[4961]: I1205 17:44:22.213396 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="3dfe66b7-513e-4e53-854d-b1d0d9ca8acd" containerName="pull" Dec 05 17:44:22 crc kubenswrapper[4961]: I1205 17:44:22.213510 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="3dfe66b7-513e-4e53-854d-b1d0d9ca8acd" containerName="extract" Dec 05 17:44:22 crc kubenswrapper[4961]: I1205 17:44:22.213985 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-q2qxh" Dec 05 17:44:22 crc kubenswrapper[4961]: I1205 17:44:22.215915 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"kube-root-ca.crt" Dec 05 17:44:22 crc kubenswrapper[4961]: I1205 17:44:22.216389 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-operator-dockercfg-46wtw" Dec 05 17:44:22 crc kubenswrapper[4961]: I1205 17:44:22.227486 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"openshift-service-ca.crt" Dec 05 17:44:22 crc kubenswrapper[4961]: I1205 17:44:22.231347 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-q2qxh"] Dec 05 17:44:22 crc kubenswrapper[4961]: I1205 17:44:22.402521 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kc6m\" (UniqueName: \"kubernetes.io/projected/7d46084a-4838-43be-80f5-54ada85ff38f-kube-api-access-9kc6m\") pod \"nmstate-operator-5b5b58f5c8-q2qxh\" (UID: \"7d46084a-4838-43be-80f5-54ada85ff38f\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-q2qxh" Dec 05 17:44:22 crc kubenswrapper[4961]: I1205 17:44:22.503993 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kc6m\" (UniqueName: \"kubernetes.io/projected/7d46084a-4838-43be-80f5-54ada85ff38f-kube-api-access-9kc6m\") pod \"nmstate-operator-5b5b58f5c8-q2qxh\" (UID: \"7d46084a-4838-43be-80f5-54ada85ff38f\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-q2qxh" Dec 05 17:44:22 crc kubenswrapper[4961]: I1205 17:44:22.520567 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kc6m\" (UniqueName: \"kubernetes.io/projected/7d46084a-4838-43be-80f5-54ada85ff38f-kube-api-access-9kc6m\") pod \"nmstate-operator-5b5b58f5c8-q2qxh\" (UID: \"7d46084a-4838-43be-80f5-54ada85ff38f\") " pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-q2qxh" Dec 05 17:44:22 crc kubenswrapper[4961]: I1205 17:44:22.528343 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-q2qxh" Dec 05 17:44:22 crc kubenswrapper[4961]: I1205 17:44:22.706183 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-operator-5b5b58f5c8-q2qxh"] Dec 05 17:44:23 crc kubenswrapper[4961]: I1205 17:44:23.439030 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-q2qxh" event={"ID":"7d46084a-4838-43be-80f5-54ada85ff38f","Type":"ContainerStarted","Data":"ef4888f6eceb1d7b2f7d8979e97cc88ad2e60103a8b6ffc9271d4626082f8041"} Dec 05 17:44:25 crc kubenswrapper[4961]: I1205 17:44:25.451256 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-q2qxh" event={"ID":"7d46084a-4838-43be-80f5-54ada85ff38f","Type":"ContainerStarted","Data":"528980410ac51d4e9dde08c4f2d8ff28a6ed08029568d40ecd1f100adfbedb4e"} Dec 05 17:44:25 crc kubenswrapper[4961]: I1205 17:44:25.471830 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-operator-5b5b58f5c8-q2qxh" podStartSLOduration=1.366299938 podStartE2EDuration="3.471810102s" podCreationTimestamp="2025-12-05 17:44:22 +0000 UTC" firstStartedPulling="2025-12-05 17:44:22.717569033 +0000 UTC m=+668.778719506" lastFinishedPulling="2025-12-05 17:44:24.823079197 +0000 UTC m=+670.884229670" observedRunningTime="2025-12-05 17:44:25.468191572 +0000 UTC m=+671.529342055" watchObservedRunningTime="2025-12-05 17:44:25.471810102 +0000 UTC m=+671.532960575" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.462420 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-r585k"] Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.463485 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-r585k" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.465759 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"nmstate-handler-dockercfg-j4srk" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.472563 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-r585k"] Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.486146 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k6qjw"] Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.487009 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k6qjw" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.497891 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"openshift-nmstate-webhook" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.503401 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k6qjw"] Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.509172 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-handler-sp6lw"] Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.510264 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-handler-sp6lw" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.604364 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q6m2q\" (UniqueName: \"kubernetes.io/projected/60b41a10-29ea-46ca-bc08-8c0473394b19-kube-api-access-q6m2q\") pod \"nmstate-metrics-7f946cbc9-r585k\" (UID: \"60b41a10-29ea-46ca-bc08-8c0473394b19\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-r585k" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.604840 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/ea22d5fb-23bc-436e-8ab7-80e86c571c28-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-k6qjw\" (UID: \"ea22d5fb-23bc-436e-8ab7-80e86c571c28\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k6qjw" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.604868 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-94l7h\" (UniqueName: \"kubernetes.io/projected/ea22d5fb-23bc-436e-8ab7-80e86c571c28-kube-api-access-94l7h\") pod \"nmstate-webhook-5f6d4c5ccb-k6qjw\" (UID: \"ea22d5fb-23bc-436e-8ab7-80e86c571c28\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k6qjw" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.621842 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h4zpz"] Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.622544 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h4zpz" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.624580 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-nmstate"/"nginx-conf" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.624980 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"plugin-serving-cert" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.625006 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-nmstate"/"default-dockercfg-8jpsc" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.638043 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h4zpz"] Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.706029 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/5593be6a-3351-48eb-ac4b-e34d11ac0b49-ovs-socket\") pod \"nmstate-handler-sp6lw\" (UID: \"5593be6a-3351-48eb-ac4b-e34d11ac0b49\") " pod="openshift-nmstate/nmstate-handler-sp6lw" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.706284 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q6m2q\" (UniqueName: \"kubernetes.io/projected/60b41a10-29ea-46ca-bc08-8c0473394b19-kube-api-access-q6m2q\") pod \"nmstate-metrics-7f946cbc9-r585k\" (UID: \"60b41a10-29ea-46ca-bc08-8c0473394b19\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-r585k" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.706385 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/5593be6a-3351-48eb-ac4b-e34d11ac0b49-nmstate-lock\") pod \"nmstate-handler-sp6lw\" 
(UID: \"5593be6a-3351-48eb-ac4b-e34d11ac0b49\") " pod="openshift-nmstate/nmstate-handler-sp6lw" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.706481 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/5593be6a-3351-48eb-ac4b-e34d11ac0b49-dbus-socket\") pod \"nmstate-handler-sp6lw\" (UID: \"5593be6a-3351-48eb-ac4b-e34d11ac0b49\") " pod="openshift-nmstate/nmstate-handler-sp6lw" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.706601 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z6jgp\" (UniqueName: \"kubernetes.io/projected/5593be6a-3351-48eb-ac4b-e34d11ac0b49-kube-api-access-z6jgp\") pod \"nmstate-handler-sp6lw\" (UID: \"5593be6a-3351-48eb-ac4b-e34d11ac0b49\") " pod="openshift-nmstate/nmstate-handler-sp6lw" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.706702 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-94l7h\" (UniqueName: \"kubernetes.io/projected/ea22d5fb-23bc-436e-8ab7-80e86c571c28-kube-api-access-94l7h\") pod \"nmstate-webhook-5f6d4c5ccb-k6qjw\" (UID: \"ea22d5fb-23bc-436e-8ab7-80e86c571c28\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k6qjw" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.706812 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/ea22d5fb-23bc-436e-8ab7-80e86c571c28-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-k6qjw\" (UID: \"ea22d5fb-23bc-436e-8ab7-80e86c571c28\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k6qjw" Dec 05 17:44:26 crc kubenswrapper[4961]: E1205 17:44:26.707000 4961 secret.go:188] Couldn't get secret openshift-nmstate/openshift-nmstate-webhook: secret "openshift-nmstate-webhook" not found Dec 05 17:44:26 crc kubenswrapper[4961]: E1205 17:44:26.707119 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ea22d5fb-23bc-436e-8ab7-80e86c571c28-tls-key-pair podName:ea22d5fb-23bc-436e-8ab7-80e86c571c28 nodeName:}" failed. No retries permitted until 2025-12-05 17:44:27.207100962 +0000 UTC m=+673.268251435 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "tls-key-pair" (UniqueName: "kubernetes.io/secret/ea22d5fb-23bc-436e-8ab7-80e86c571c28-tls-key-pair") pod "nmstate-webhook-5f6d4c5ccb-k6qjw" (UID: "ea22d5fb-23bc-436e-8ab7-80e86c571c28") : secret "openshift-nmstate-webhook" not found Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.725102 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q6m2q\" (UniqueName: \"kubernetes.io/projected/60b41a10-29ea-46ca-bc08-8c0473394b19-kube-api-access-q6m2q\") pod \"nmstate-metrics-7f946cbc9-r585k\" (UID: \"60b41a10-29ea-46ca-bc08-8c0473394b19\") " pod="openshift-nmstate/nmstate-metrics-7f946cbc9-r585k" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.734047 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-94l7h\" (UniqueName: \"kubernetes.io/projected/ea22d5fb-23bc-436e-8ab7-80e86c571c28-kube-api-access-94l7h\") pod \"nmstate-webhook-5f6d4c5ccb-k6qjw\" (UID: \"ea22d5fb-23bc-436e-8ab7-80e86c571c28\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k6qjw" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.780614 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-r585k" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.807970 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-h4zpz\" (UID: \"c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h4zpz" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.808030 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8zn5\" (UniqueName: \"kubernetes.io/projected/c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7-kube-api-access-l8zn5\") pod \"nmstate-console-plugin-7fbb5f6569-h4zpz\" (UID: \"c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h4zpz" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.808067 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/5593be6a-3351-48eb-ac4b-e34d11ac0b49-dbus-socket\") pod \"nmstate-handler-sp6lw\" (UID: \"5593be6a-3351-48eb-ac4b-e34d11ac0b49\") " pod="openshift-nmstate/nmstate-handler-sp6lw" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.808100 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z6jgp\" (UniqueName: \"kubernetes.io/projected/5593be6a-3351-48eb-ac4b-e34d11ac0b49-kube-api-access-z6jgp\") pod \"nmstate-handler-sp6lw\" (UID: \"5593be6a-3351-48eb-ac4b-e34d11ac0b49\") " pod="openshift-nmstate/nmstate-handler-sp6lw" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.808098 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-5d69bfccc8-8dxt6"] Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.808171 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/5593be6a-3351-48eb-ac4b-e34d11ac0b49-ovs-socket\") pod \"nmstate-handler-sp6lw\" (UID: \"5593be6a-3351-48eb-ac4b-e34d11ac0b49\") " pod="openshift-nmstate/nmstate-handler-sp6lw" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.808222 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-h4zpz\" (UID: \"c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h4zpz" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.808268 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/5593be6a-3351-48eb-ac4b-e34d11ac0b49-nmstate-lock\") pod \"nmstate-handler-sp6lw\" (UID: \"5593be6a-3351-48eb-ac4b-e34d11ac0b49\") " pod="openshift-nmstate/nmstate-handler-sp6lw" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.808354 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nmstate-lock\" (UniqueName: \"kubernetes.io/host-path/5593be6a-3351-48eb-ac4b-e34d11ac0b49-nmstate-lock\") pod \"nmstate-handler-sp6lw\" (UID: \"5593be6a-3351-48eb-ac4b-e34d11ac0b49\") " pod="openshift-nmstate/nmstate-handler-sp6lw" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.808681 4961 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dbus-socket\" (UniqueName: \"kubernetes.io/host-path/5593be6a-3351-48eb-ac4b-e34d11ac0b49-dbus-socket\") pod \"nmstate-handler-sp6lw\" (UID: \"5593be6a-3351-48eb-ac4b-e34d11ac0b49\") " pod="openshift-nmstate/nmstate-handler-sp6lw" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.809067 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-socket\" (UniqueName: \"kubernetes.io/host-path/5593be6a-3351-48eb-ac4b-e34d11ac0b49-ovs-socket\") pod \"nmstate-handler-sp6lw\" (UID: \"5593be6a-3351-48eb-ac4b-e34d11ac0b49\") " pod="openshift-nmstate/nmstate-handler-sp6lw" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.814430 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.824811 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5d69bfccc8-8dxt6"] Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.827888 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z6jgp\" (UniqueName: \"kubernetes.io/projected/5593be6a-3351-48eb-ac4b-e34d11ac0b49-kube-api-access-z6jgp\") pod \"nmstate-handler-sp6lw\" (UID: \"5593be6a-3351-48eb-ac4b-e34d11ac0b49\") " pod="openshift-nmstate/nmstate-handler-sp6lw" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.918384 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-h4zpz\" (UID: \"c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h4zpz" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.918649 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8zn5\" (UniqueName: \"kubernetes.io/projected/c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7-kube-api-access-l8zn5\") pod \"nmstate-console-plugin-7fbb5f6569-h4zpz\" (UID: \"c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h4zpz" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.918842 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-h4zpz\" (UID: \"c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h4zpz" Dec 05 17:44:26 crc kubenswrapper[4961]: E1205 17:44:26.919982 4961 secret.go:188] Couldn't get secret openshift-nmstate/plugin-serving-cert: secret "plugin-serving-cert" not found Dec 05 17:44:26 crc kubenswrapper[4961]: E1205 17:44:26.920879 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7-plugin-serving-cert podName:c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7 nodeName:}" failed. No retries permitted until 2025-12-05 17:44:27.420807631 +0000 UTC m=+673.481958104 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "plugin-serving-cert" (UniqueName: "kubernetes.io/secret/c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7-plugin-serving-cert") pod "nmstate-console-plugin-7fbb5f6569-h4zpz" (UID: "c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7") : secret "plugin-serving-cert" not found Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.932535 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7-nginx-conf\") pod \"nmstate-console-plugin-7fbb5f6569-h4zpz\" (UID: \"c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h4zpz" Dec 05 17:44:26 crc kubenswrapper[4961]: I1205 17:44:26.941781 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8zn5\" (UniqueName: \"kubernetes.io/projected/c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7-kube-api-access-l8zn5\") pod \"nmstate-console-plugin-7fbb5f6569-h4zpz\" (UID: \"c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h4zpz" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.020543 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/dea475a4-eb64-458d-9702-eb6c85d47a77-console-serving-cert\") pod \"console-5d69bfccc8-8dxt6\" (UID: \"dea475a4-eb64-458d-9702-eb6c85d47a77\") " pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.020589 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/dea475a4-eb64-458d-9702-eb6c85d47a77-console-oauth-config\") pod \"console-5d69bfccc8-8dxt6\" (UID: \"dea475a4-eb64-458d-9702-eb6c85d47a77\") " pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.020606 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dea475a4-eb64-458d-9702-eb6c85d47a77-trusted-ca-bundle\") pod \"console-5d69bfccc8-8dxt6\" (UID: \"dea475a4-eb64-458d-9702-eb6c85d47a77\") " pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.020633 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hz9mw\" (UniqueName: \"kubernetes.io/projected/dea475a4-eb64-458d-9702-eb6c85d47a77-kube-api-access-hz9mw\") pod \"console-5d69bfccc8-8dxt6\" (UID: \"dea475a4-eb64-458d-9702-eb6c85d47a77\") " pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.020650 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/dea475a4-eb64-458d-9702-eb6c85d47a77-console-config\") pod \"console-5d69bfccc8-8dxt6\" (UID: \"dea475a4-eb64-458d-9702-eb6c85d47a77\") " pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.020676 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/dea475a4-eb64-458d-9702-eb6c85d47a77-oauth-serving-cert\") pod \"console-5d69bfccc8-8dxt6\" (UID: 
\"dea475a4-eb64-458d-9702-eb6c85d47a77\") " pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.020725 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/dea475a4-eb64-458d-9702-eb6c85d47a77-service-ca\") pod \"console-5d69bfccc8-8dxt6\" (UID: \"dea475a4-eb64-458d-9702-eb6c85d47a77\") " pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.122328 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hz9mw\" (UniqueName: \"kubernetes.io/projected/dea475a4-eb64-458d-9702-eb6c85d47a77-kube-api-access-hz9mw\") pod \"console-5d69bfccc8-8dxt6\" (UID: \"dea475a4-eb64-458d-9702-eb6c85d47a77\") " pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.122374 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/dea475a4-eb64-458d-9702-eb6c85d47a77-console-config\") pod \"console-5d69bfccc8-8dxt6\" (UID: \"dea475a4-eb64-458d-9702-eb6c85d47a77\") " pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.122402 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/dea475a4-eb64-458d-9702-eb6c85d47a77-oauth-serving-cert\") pod \"console-5d69bfccc8-8dxt6\" (UID: \"dea475a4-eb64-458d-9702-eb6c85d47a77\") " pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.122442 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/dea475a4-eb64-458d-9702-eb6c85d47a77-service-ca\") pod \"console-5d69bfccc8-8dxt6\" (UID: \"dea475a4-eb64-458d-9702-eb6c85d47a77\") " pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.122490 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/dea475a4-eb64-458d-9702-eb6c85d47a77-console-serving-cert\") pod \"console-5d69bfccc8-8dxt6\" (UID: \"dea475a4-eb64-458d-9702-eb6c85d47a77\") " pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.122506 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/dea475a4-eb64-458d-9702-eb6c85d47a77-console-oauth-config\") pod \"console-5d69bfccc8-8dxt6\" (UID: \"dea475a4-eb64-458d-9702-eb6c85d47a77\") " pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.122524 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dea475a4-eb64-458d-9702-eb6c85d47a77-trusted-ca-bundle\") pod \"console-5d69bfccc8-8dxt6\" (UID: \"dea475a4-eb64-458d-9702-eb6c85d47a77\") " pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.123443 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/dea475a4-eb64-458d-9702-eb6c85d47a77-trusted-ca-bundle\") pod \"console-5d69bfccc8-8dxt6\" (UID: 
\"dea475a4-eb64-458d-9702-eb6c85d47a77\") " pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.123973 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-nmstate/nmstate-handler-sp6lw" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.124363 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/dea475a4-eb64-458d-9702-eb6c85d47a77-service-ca\") pod \"console-5d69bfccc8-8dxt6\" (UID: \"dea475a4-eb64-458d-9702-eb6c85d47a77\") " pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.126424 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/dea475a4-eb64-458d-9702-eb6c85d47a77-console-serving-cert\") pod \"console-5d69bfccc8-8dxt6\" (UID: \"dea475a4-eb64-458d-9702-eb6c85d47a77\") " pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.126888 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/dea475a4-eb64-458d-9702-eb6c85d47a77-oauth-serving-cert\") pod \"console-5d69bfccc8-8dxt6\" (UID: \"dea475a4-eb64-458d-9702-eb6c85d47a77\") " pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.127111 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/dea475a4-eb64-458d-9702-eb6c85d47a77-console-oauth-config\") pod \"console-5d69bfccc8-8dxt6\" (UID: \"dea475a4-eb64-458d-9702-eb6c85d47a77\") " pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.127213 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/dea475a4-eb64-458d-9702-eb6c85d47a77-console-config\") pod \"console-5d69bfccc8-8dxt6\" (UID: \"dea475a4-eb64-458d-9702-eb6c85d47a77\") " pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.141446 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hz9mw\" (UniqueName: \"kubernetes.io/projected/dea475a4-eb64-458d-9702-eb6c85d47a77-kube-api-access-hz9mw\") pod \"console-5d69bfccc8-8dxt6\" (UID: \"dea475a4-eb64-458d-9702-eb6c85d47a77\") " pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:27 crc kubenswrapper[4961]: W1205 17:44:27.157742 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5593be6a_3351_48eb_ac4b_e34d11ac0b49.slice/crio-2fed8988d21c82f8959d7dcddccc314c1c35e5a4e6476294e1756a32acd3f5ce WatchSource:0}: Error finding container 2fed8988d21c82f8959d7dcddccc314c1c35e5a4e6476294e1756a32acd3f5ce: Status 404 returned error can't find the container with id 2fed8988d21c82f8959d7dcddccc314c1c35e5a4e6476294e1756a32acd3f5ce Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.168824 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.215735 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-metrics-7f946cbc9-r585k"] Dec 05 17:44:27 crc kubenswrapper[4961]: W1205 17:44:27.216460 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod60b41a10_29ea_46ca_bc08_8c0473394b19.slice/crio-3d37c2af47e0a9603bc1d97967790e78ba391789b78d0eaa5d75543516a4270c WatchSource:0}: Error finding container 3d37c2af47e0a9603bc1d97967790e78ba391789b78d0eaa5d75543516a4270c: Status 404 returned error can't find the container with id 3d37c2af47e0a9603bc1d97967790e78ba391789b78d0eaa5d75543516a4270c Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.224221 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/ea22d5fb-23bc-436e-8ab7-80e86c571c28-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-k6qjw\" (UID: \"ea22d5fb-23bc-436e-8ab7-80e86c571c28\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k6qjw" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.229182 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tls-key-pair\" (UniqueName: \"kubernetes.io/secret/ea22d5fb-23bc-436e-8ab7-80e86c571c28-tls-key-pair\") pod \"nmstate-webhook-5f6d4c5ccb-k6qjw\" (UID: \"ea22d5fb-23bc-436e-8ab7-80e86c571c28\") " pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k6qjw" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.362939 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-5d69bfccc8-8dxt6"] Dec 05 17:44:27 crc kubenswrapper[4961]: W1205 17:44:27.371275 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poddea475a4_eb64_458d_9702_eb6c85d47a77.slice/crio-ae7b330249245309828e46c7108395f8534dd7bec2ea22079cadc17c8bb5178f WatchSource:0}: Error finding container ae7b330249245309828e46c7108395f8534dd7bec2ea22079cadc17c8bb5178f: Status 404 returned error can't find the container with id ae7b330249245309828e46c7108395f8534dd7bec2ea22079cadc17c8bb5178f Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.410040 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k6qjw" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.427232 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-h4zpz\" (UID: \"c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h4zpz" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.432420 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugin-serving-cert\" (UniqueName: \"kubernetes.io/secret/c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7-plugin-serving-cert\") pod \"nmstate-console-plugin-7fbb5f6569-h4zpz\" (UID: \"c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7\") " pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h4zpz" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.461470 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-r585k" event={"ID":"60b41a10-29ea-46ca-bc08-8c0473394b19","Type":"ContainerStarted","Data":"3d37c2af47e0a9603bc1d97967790e78ba391789b78d0eaa5d75543516a4270c"} Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.462267 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5d69bfccc8-8dxt6" event={"ID":"dea475a4-eb64-458d-9702-eb6c85d47a77","Type":"ContainerStarted","Data":"ae7b330249245309828e46c7108395f8534dd7bec2ea22079cadc17c8bb5178f"} Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.463067 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-sp6lw" event={"ID":"5593be6a-3351-48eb-ac4b-e34d11ac0b49","Type":"ContainerStarted","Data":"2fed8988d21c82f8959d7dcddccc314c1c35e5a4e6476294e1756a32acd3f5ce"} Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.538828 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h4zpz" Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.615111 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k6qjw"] Dec 05 17:44:27 crc kubenswrapper[4961]: I1205 17:44:27.731686 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h4zpz"] Dec 05 17:44:28 crc kubenswrapper[4961]: I1205 17:44:28.470893 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-5d69bfccc8-8dxt6" event={"ID":"dea475a4-eb64-458d-9702-eb6c85d47a77","Type":"ContainerStarted","Data":"67b4b111a3818c0b18818bd0e75a68d125102501add951ae428190d526bcb82b"} Dec 05 17:44:28 crc kubenswrapper[4961]: I1205 17:44:28.473034 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h4zpz" event={"ID":"c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7","Type":"ContainerStarted","Data":"115a45255f13bb574dc72d3ee3f1efd87404d10e4ff2643491b04505c079173f"} Dec 05 17:44:28 crc kubenswrapper[4961]: I1205 17:44:28.475571 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k6qjw" event={"ID":"ea22d5fb-23bc-436e-8ab7-80e86c571c28","Type":"ContainerStarted","Data":"0c0effd48ae363c6f229cde5c92ab59e2cef9f2850178adc2744d64e99f7db0f"} Dec 05 17:44:28 crc kubenswrapper[4961]: I1205 17:44:28.490691 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-5d69bfccc8-8dxt6" podStartSLOduration=2.4906641990000002 podStartE2EDuration="2.490664199s" podCreationTimestamp="2025-12-05 17:44:26 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:44:28.48786656 +0000 UTC m=+674.549017053" watchObservedRunningTime="2025-12-05 17:44:28.490664199 +0000 UTC m=+674.551814692" Dec 05 17:44:30 crc kubenswrapper[4961]: I1205 17:44:30.492722 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-handler-sp6lw" event={"ID":"5593be6a-3351-48eb-ac4b-e34d11ac0b49","Type":"ContainerStarted","Data":"ae891e364d484b95c3515e153db7166eaee86f68c97895423f0c87b5c1c50220"} Dec 05 17:44:30 crc kubenswrapper[4961]: I1205 17:44:30.493448 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-handler-sp6lw" Dec 05 17:44:30 crc kubenswrapper[4961]: I1205 17:44:30.496000 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k6qjw" event={"ID":"ea22d5fb-23bc-436e-8ab7-80e86c571c28","Type":"ContainerStarted","Data":"2c0b2b90bd064defd7e802a8a35cf3d83b0ca500fd974b30209ed1d3a13a0fca"} Dec 05 17:44:30 crc kubenswrapper[4961]: I1205 17:44:30.496155 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k6qjw" Dec 05 17:44:30 crc kubenswrapper[4961]: I1205 17:44:30.498960 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-r585k" event={"ID":"60b41a10-29ea-46ca-bc08-8c0473394b19","Type":"ContainerStarted","Data":"ecb8bda4ec0df50bf17d92d2ec59ae36b043a1631f6659f48c1e18993f9b10a7"} Dec 05 17:44:30 crc kubenswrapper[4961]: I1205 17:44:30.500725 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h4zpz" 
event={"ID":"c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7","Type":"ContainerStarted","Data":"3752bc4ae030c2f5273dac99621b32a0d2c8e0da54792de33dc6a003ca584d8a"} Dec 05 17:44:30 crc kubenswrapper[4961]: I1205 17:44:30.528810 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k6qjw" podStartSLOduration=2.04491491 podStartE2EDuration="4.528792728s" podCreationTimestamp="2025-12-05 17:44:26 +0000 UTC" firstStartedPulling="2025-12-05 17:44:27.626341939 +0000 UTC m=+673.687492412" lastFinishedPulling="2025-12-05 17:44:30.110219717 +0000 UTC m=+676.171370230" observedRunningTime="2025-12-05 17:44:30.525748523 +0000 UTC m=+676.586899026" watchObservedRunningTime="2025-12-05 17:44:30.528792728 +0000 UTC m=+676.589943211" Dec 05 17:44:30 crc kubenswrapper[4961]: I1205 17:44:30.530487 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-handler-sp6lw" podStartSLOduration=1.615225167 podStartE2EDuration="4.53047791s" podCreationTimestamp="2025-12-05 17:44:26 +0000 UTC" firstStartedPulling="2025-12-05 17:44:27.159547624 +0000 UTC m=+673.220698097" lastFinishedPulling="2025-12-05 17:44:30.074800327 +0000 UTC m=+676.135950840" observedRunningTime="2025-12-05 17:44:30.512206011 +0000 UTC m=+676.573356484" watchObservedRunningTime="2025-12-05 17:44:30.53047791 +0000 UTC m=+676.591628393" Dec 05 17:44:30 crc kubenswrapper[4961]: I1205 17:44:30.545002 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-console-plugin-7fbb5f6569-h4zpz" podStartSLOduration=2.215979201 podStartE2EDuration="4.544985616s" podCreationTimestamp="2025-12-05 17:44:26 +0000 UTC" firstStartedPulling="2025-12-05 17:44:27.746317676 +0000 UTC m=+673.807468159" lastFinishedPulling="2025-12-05 17:44:30.075324101 +0000 UTC m=+676.136474574" observedRunningTime="2025-12-05 17:44:30.543898889 +0000 UTC m=+676.605049382" watchObservedRunningTime="2025-12-05 17:44:30.544985616 +0000 UTC m=+676.606136089" Dec 05 17:44:33 crc kubenswrapper[4961]: I1205 17:44:33.527017 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-r585k" event={"ID":"60b41a10-29ea-46ca-bc08-8c0473394b19","Type":"ContainerStarted","Data":"537e89d04f0d73916b4dba85c26d5eaba3459a0deed9979d91edcc5342000426"} Dec 05 17:44:33 crc kubenswrapper[4961]: I1205 17:44:33.545248 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-nmstate/nmstate-metrics-7f946cbc9-r585k" podStartSLOduration=1.517429725 podStartE2EDuration="7.545230807s" podCreationTimestamp="2025-12-05 17:44:26 +0000 UTC" firstStartedPulling="2025-12-05 17:44:27.218369219 +0000 UTC m=+673.279519692" lastFinishedPulling="2025-12-05 17:44:33.246170301 +0000 UTC m=+679.307320774" observedRunningTime="2025-12-05 17:44:33.541323531 +0000 UTC m=+679.602474024" watchObservedRunningTime="2025-12-05 17:44:33.545230807 +0000 UTC m=+679.606381280" Dec 05 17:44:37 crc kubenswrapper[4961]: I1205 17:44:37.150588 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-handler-sp6lw" Dec 05 17:44:37 crc kubenswrapper[4961]: I1205 17:44:37.172107 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:37 crc kubenswrapper[4961]: I1205 17:44:37.172174 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:37 crc kubenswrapper[4961]: I1205 17:44:37.184387 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:37 crc kubenswrapper[4961]: I1205 17:44:37.554788 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-5d69bfccc8-8dxt6" Dec 05 17:44:37 crc kubenswrapper[4961]: I1205 17:44:37.615109 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-sp65b"] Dec 05 17:44:47 crc kubenswrapper[4961]: I1205 17:44:47.419602 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-nmstate/nmstate-webhook-5f6d4c5ccb-k6qjw" Dec 05 17:45:00 crc kubenswrapper[4961]: I1205 17:45:00.158566 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415945-24bgf"] Dec 05 17:45:00 crc kubenswrapper[4961]: I1205 17:45:00.160048 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-24bgf" Dec 05 17:45:00 crc kubenswrapper[4961]: I1205 17:45:00.165767 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415945-24bgf"] Dec 05 17:45:00 crc kubenswrapper[4961]: I1205 17:45:00.169085 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Dec 05 17:45:00 crc kubenswrapper[4961]: I1205 17:45:00.169356 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Dec 05 17:45:00 crc kubenswrapper[4961]: I1205 17:45:00.185215 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xhpm\" (UniqueName: \"kubernetes.io/projected/b55278a7-611a-48ff-bf15-07270f3614ed-kube-api-access-9xhpm\") pod \"collect-profiles-29415945-24bgf\" (UID: \"b55278a7-611a-48ff-bf15-07270f3614ed\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-24bgf" Dec 05 17:45:00 crc kubenswrapper[4961]: I1205 17:45:00.185283 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b55278a7-611a-48ff-bf15-07270f3614ed-secret-volume\") pod \"collect-profiles-29415945-24bgf\" (UID: \"b55278a7-611a-48ff-bf15-07270f3614ed\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-24bgf" Dec 05 17:45:00 crc kubenswrapper[4961]: I1205 17:45:00.185315 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b55278a7-611a-48ff-bf15-07270f3614ed-config-volume\") pod \"collect-profiles-29415945-24bgf\" (UID: \"b55278a7-611a-48ff-bf15-07270f3614ed\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-24bgf" Dec 05 17:45:00 crc kubenswrapper[4961]: I1205 17:45:00.286405 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b55278a7-611a-48ff-bf15-07270f3614ed-secret-volume\") pod \"collect-profiles-29415945-24bgf\" (UID: \"b55278a7-611a-48ff-bf15-07270f3614ed\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-24bgf" Dec 05 17:45:00 crc 
Dec 05 17:45:00 crc kubenswrapper[4961]: I1205 17:45:00.286463 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b55278a7-611a-48ff-bf15-07270f3614ed-config-volume\") pod \"collect-profiles-29415945-24bgf\" (UID: \"b55278a7-611a-48ff-bf15-07270f3614ed\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-24bgf"
Dec 05 17:45:00 crc kubenswrapper[4961]: I1205 17:45:00.286513 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xhpm\" (UniqueName: \"kubernetes.io/projected/b55278a7-611a-48ff-bf15-07270f3614ed-kube-api-access-9xhpm\") pod \"collect-profiles-29415945-24bgf\" (UID: \"b55278a7-611a-48ff-bf15-07270f3614ed\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-24bgf"
Dec 05 17:45:00 crc kubenswrapper[4961]: I1205 17:45:00.287724 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b55278a7-611a-48ff-bf15-07270f3614ed-config-volume\") pod \"collect-profiles-29415945-24bgf\" (UID: \"b55278a7-611a-48ff-bf15-07270f3614ed\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-24bgf"
Dec 05 17:45:00 crc kubenswrapper[4961]: I1205 17:45:00.307099 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b55278a7-611a-48ff-bf15-07270f3614ed-secret-volume\") pod \"collect-profiles-29415945-24bgf\" (UID: \"b55278a7-611a-48ff-bf15-07270f3614ed\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-24bgf"
Dec 05 17:45:00 crc kubenswrapper[4961]: I1205 17:45:00.310089 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xhpm\" (UniqueName: \"kubernetes.io/projected/b55278a7-611a-48ff-bf15-07270f3614ed-kube-api-access-9xhpm\") pod \"collect-profiles-29415945-24bgf\" (UID: \"b55278a7-611a-48ff-bf15-07270f3614ed\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-24bgf"
Dec 05 17:45:00 crc kubenswrapper[4961]: I1205 17:45:00.489517 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-24bgf"
Dec 05 17:45:00 crc kubenswrapper[4961]: I1205 17:45:00.709615 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415945-24bgf"]
Dec 05 17:45:01 crc kubenswrapper[4961]: I1205 17:45:01.700359 4961 generic.go:334] "Generic (PLEG): container finished" podID="b55278a7-611a-48ff-bf15-07270f3614ed" containerID="26f55e6efde06fc09d4ab4da8729d7bfad995984a7933fbc8b315af12687863d" exitCode=0
Dec 05 17:45:01 crc kubenswrapper[4961]: I1205 17:45:01.700424 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-24bgf" event={"ID":"b55278a7-611a-48ff-bf15-07270f3614ed","Type":"ContainerDied","Data":"26f55e6efde06fc09d4ab4da8729d7bfad995984a7933fbc8b315af12687863d"}
Dec 05 17:45:01 crc kubenswrapper[4961]: I1205 17:45:01.700485 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-24bgf" event={"ID":"b55278a7-611a-48ff-bf15-07270f3614ed","Type":"ContainerStarted","Data":"f3ea2b0583368ace953e1e51df1736171eee85c38fea74d1423d0b72ecd1cf27"}
Dec 05 17:45:02 crc kubenswrapper[4961]: I1205 17:45:02.655536 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/console-f9d7485db-sp65b" podUID="517806cd-8b4c-4e6c-9edf-7dbcd125e0ee" containerName="console" containerID="cri-o://01177853a5e0acb043304353fbac8e83c80c7bb7d40d7a7190ceb58e3214faac" gracePeriod=15
Dec 05 17:45:02 crc kubenswrapper[4961]: I1205 17:45:02.926697 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-24bgf"
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.125939 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xhpm\" (UniqueName: \"kubernetes.io/projected/b55278a7-611a-48ff-bf15-07270f3614ed-kube-api-access-9xhpm\") pod \"b55278a7-611a-48ff-bf15-07270f3614ed\" (UID: \"b55278a7-611a-48ff-bf15-07270f3614ed\") "
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.126028 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b55278a7-611a-48ff-bf15-07270f3614ed-secret-volume\") pod \"b55278a7-611a-48ff-bf15-07270f3614ed\" (UID: \"b55278a7-611a-48ff-bf15-07270f3614ed\") "
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.126082 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b55278a7-611a-48ff-bf15-07270f3614ed-config-volume\") pod \"b55278a7-611a-48ff-bf15-07270f3614ed\" (UID: \"b55278a7-611a-48ff-bf15-07270f3614ed\") "
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.126678 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b55278a7-611a-48ff-bf15-07270f3614ed-config-volume" (OuterVolumeSpecName: "config-volume") pod "b55278a7-611a-48ff-bf15-07270f3614ed" (UID: "b55278a7-611a-48ff-bf15-07270f3614ed"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.132049 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b55278a7-611a-48ff-bf15-07270f3614ed-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b55278a7-611a-48ff-bf15-07270f3614ed" (UID: "b55278a7-611a-48ff-bf15-07270f3614ed"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.132095 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b55278a7-611a-48ff-bf15-07270f3614ed-kube-api-access-9xhpm" (OuterVolumeSpecName: "kube-api-access-9xhpm") pod "b55278a7-611a-48ff-bf15-07270f3614ed" (UID: "b55278a7-611a-48ff-bf15-07270f3614ed"). InnerVolumeSpecName "kube-api-access-9xhpm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.227467 4961 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b55278a7-611a-48ff-bf15-07270f3614ed-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.227510 4961 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b55278a7-611a-48ff-bf15-07270f3614ed-config-volume\") on node \"crc\" DevicePath \"\""
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.227520 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xhpm\" (UniqueName: \"kubernetes.io/projected/b55278a7-611a-48ff-bf15-07270f3614ed-kube-api-access-9xhpm\") on node \"crc\" DevicePath \"\""
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.425464 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz"]
Dec 05 17:45:03 crc kubenswrapper[4961]: E1205 17:45:03.425917 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b55278a7-611a-48ff-bf15-07270f3614ed" containerName="collect-profiles"
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.425986 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="b55278a7-611a-48ff-bf15-07270f3614ed" containerName="collect-profiles"
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.426143 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="b55278a7-611a-48ff-bf15-07270f3614ed" containerName="collect-profiles"
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.426867 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz"
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.428932 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc"
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.429251 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9f04f5e2-a474-427b-a466-77d789a6daa7-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz\" (UID: \"9f04f5e2-a474-427b-a466-77d789a6daa7\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz"
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.429325 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9f04f5e2-a474-427b-a466-77d789a6daa7-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz\" (UID: \"9f04f5e2-a474-427b-a466-77d789a6daa7\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz"
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.429356 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ftdvz\" (UniqueName: \"kubernetes.io/projected/9f04f5e2-a474-427b-a466-77d789a6daa7-kube-api-access-ftdvz\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz\" (UID: \"9f04f5e2-a474-427b-a466-77d789a6daa7\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz"
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.438258 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz"]
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.530537 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9f04f5e2-a474-427b-a466-77d789a6daa7-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz\" (UID: \"9f04f5e2-a474-427b-a466-77d789a6daa7\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz"
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.530656 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9f04f5e2-a474-427b-a466-77d789a6daa7-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz\" (UID: \"9f04f5e2-a474-427b-a466-77d789a6daa7\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz"
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.530699 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ftdvz\" (UniqueName: \"kubernetes.io/projected/9f04f5e2-a474-427b-a466-77d789a6daa7-kube-api-access-ftdvz\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz\" (UID: \"9f04f5e2-a474-427b-a466-77d789a6daa7\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz"
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.531422 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9f04f5e2-a474-427b-a466-77d789a6daa7-util\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz\" (UID: \"9f04f5e2-a474-427b-a466-77d789a6daa7\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz"
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.531573 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9f04f5e2-a474-427b-a466-77d789a6daa7-bundle\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz\" (UID: \"9f04f5e2-a474-427b-a466-77d789a6daa7\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz"
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.551469 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ftdvz\" (UniqueName: \"kubernetes.io/projected/9f04f5e2-a474-427b-a466-77d789a6daa7-kube-api-access-ftdvz\") pod \"af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz\" (UID: \"9f04f5e2-a474-427b-a466-77d789a6daa7\") " pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz"
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.711711 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-sp65b_517806cd-8b4c-4e6c-9edf-7dbcd125e0ee/console/0.log"
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.711761 4961 generic.go:334] "Generic (PLEG): container finished" podID="517806cd-8b4c-4e6c-9edf-7dbcd125e0ee" containerID="01177853a5e0acb043304353fbac8e83c80c7bb7d40d7a7190ceb58e3214faac" exitCode=2
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.711807 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-sp65b" event={"ID":"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee","Type":"ContainerDied","Data":"01177853a5e0acb043304353fbac8e83c80c7bb7d40d7a7190ceb58e3214faac"}
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.713944 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-24bgf" event={"ID":"b55278a7-611a-48ff-bf15-07270f3614ed","Type":"ContainerDied","Data":"f3ea2b0583368ace953e1e51df1736171eee85c38fea74d1423d0b72ecd1cf27"}
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.713982 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f3ea2b0583368ace953e1e51df1736171eee85c38fea74d1423d0b72ecd1cf27"
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.713992 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415945-24bgf"
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.749168 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz"
Dec 05 17:45:03 crc kubenswrapper[4961]: E1205 17:45:03.850259 4961 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb55278a7_611a_48ff_bf15_07270f3614ed.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb55278a7_611a_48ff_bf15_07270f3614ed.slice/crio-f3ea2b0583368ace953e1e51df1736171eee85c38fea74d1423d0b72ecd1cf27\": RecentStats: unable to find data in memory cache]"
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.936226 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-sp65b_517806cd-8b4c-4e6c-9edf-7dbcd125e0ee/console/0.log"
Dec 05 17:45:03 crc kubenswrapper[4961]: I1205 17:45:03.936594 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-sp65b"
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.041231 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz"]
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.136557 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-service-ca\") pod \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") "
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.136608 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-console-serving-cert\") pod \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") "
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.136673 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-oauth-serving-cert\") pod \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") "
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.136710 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-trusted-ca-bundle\") pod \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") "
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.137476 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-service-ca" (OuterVolumeSpecName: "service-ca") pod "517806cd-8b4c-4e6c-9edf-7dbcd125e0ee" (UID: "517806cd-8b4c-4e6c-9edf-7dbcd125e0ee"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.137650 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "517806cd-8b4c-4e6c-9edf-7dbcd125e0ee" (UID: "517806cd-8b4c-4e6c-9edf-7dbcd125e0ee"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.137732 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-console-oauth-config\") pod \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") "
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.138022 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "517806cd-8b4c-4e6c-9edf-7dbcd125e0ee" (UID: "517806cd-8b4c-4e6c-9edf-7dbcd125e0ee"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.137766 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-console-config\") pod \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") "
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.138255 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8hnqz\" (UniqueName: \"kubernetes.io/projected/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-kube-api-access-8hnqz\") pod \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\" (UID: \"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee\") "
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.138659 4961 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-service-ca\") on node \"crc\" DevicePath \"\""
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.138716 4961 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-oauth-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.138731 4961 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-trusted-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.138666 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-console-config" (OuterVolumeSpecName: "console-config") pod "517806cd-8b4c-4e6c-9edf-7dbcd125e0ee" (UID: "517806cd-8b4c-4e6c-9edf-7dbcd125e0ee"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.141534 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "517806cd-8b4c-4e6c-9edf-7dbcd125e0ee" (UID: "517806cd-8b4c-4e6c-9edf-7dbcd125e0ee"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.143061 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "517806cd-8b4c-4e6c-9edf-7dbcd125e0ee" (UID: "517806cd-8b4c-4e6c-9edf-7dbcd125e0ee"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.143327 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-kube-api-access-8hnqz" (OuterVolumeSpecName: "kube-api-access-8hnqz") pod "517806cd-8b4c-4e6c-9edf-7dbcd125e0ee" (UID: "517806cd-8b4c-4e6c-9edf-7dbcd125e0ee"). InnerVolumeSpecName "kube-api-access-8hnqz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.239572 4961 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-console-oauth-config\") on node \"crc\" DevicePath \"\""
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.239618 4961 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-console-config\") on node \"crc\" DevicePath \"\""
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.239629 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8hnqz\" (UniqueName: \"kubernetes.io/projected/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-kube-api-access-8hnqz\") on node \"crc\" DevicePath \"\""
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.239640 4961 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee-console-serving-cert\") on node \"crc\" DevicePath \"\""
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.721498 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-console_console-f9d7485db-sp65b_517806cd-8b4c-4e6c-9edf-7dbcd125e0ee/console/0.log"
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.721817 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-sp65b" event={"ID":"517806cd-8b4c-4e6c-9edf-7dbcd125e0ee","Type":"ContainerDied","Data":"2d2029e80d8596f43285e2dc5669455097f1d3efa1f2cebe2650534078b240d7"}
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.721864 4961 scope.go:117] "RemoveContainer" containerID="01177853a5e0acb043304353fbac8e83c80c7bb7d40d7a7190ceb58e3214faac"
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.721982 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-sp65b"
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.755149 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz" event={"ID":"9f04f5e2-a474-427b-a466-77d789a6daa7","Type":"ContainerStarted","Data":"e47dbca3b33d0b822acd620474bdb3969c3783803a0017ece25a41021494060f"}
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.755192 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz" event={"ID":"9f04f5e2-a474-427b-a466-77d789a6daa7","Type":"ContainerStarted","Data":"80ec76b74ec11ed5a1926438dcf00371dc5650babef73c24fbca92534f195549"}
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.940386 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-console/console-f9d7485db-sp65b"]
Dec 05 17:45:04 crc kubenswrapper[4961]: I1205 17:45:04.943455 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-console/console-f9d7485db-sp65b"]
Dec 05 17:45:05 crc kubenswrapper[4961]: I1205 17:45:05.762003 4961 generic.go:334] "Generic (PLEG): container finished" podID="9f04f5e2-a474-427b-a466-77d789a6daa7" containerID="e47dbca3b33d0b822acd620474bdb3969c3783803a0017ece25a41021494060f" exitCode=0
Dec 05 17:45:05 crc kubenswrapper[4961]: I1205 17:45:05.762042 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz" event={"ID":"9f04f5e2-a474-427b-a466-77d789a6daa7","Type":"ContainerDied","Data":"e47dbca3b33d0b822acd620474bdb3969c3783803a0017ece25a41021494060f"}
Dec 05 17:45:06 crc kubenswrapper[4961]: I1205 17:45:06.870941 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="517806cd-8b4c-4e6c-9edf-7dbcd125e0ee" path="/var/lib/kubelet/pods/517806cd-8b4c-4e6c-9edf-7dbcd125e0ee/volumes"
Dec 05 17:45:09 crc kubenswrapper[4961]: I1205 17:45:09.782422 4961 generic.go:334] "Generic (PLEG): container finished" podID="9f04f5e2-a474-427b-a466-77d789a6daa7" containerID="2ed7f9c6609cdbc5debc63e4a0be866932c93765312963a75881bf02b22041c2" exitCode=0
Dec 05 17:45:09 crc kubenswrapper[4961]: I1205 17:45:09.782543 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz" event={"ID":"9f04f5e2-a474-427b-a466-77d789a6daa7","Type":"ContainerDied","Data":"2ed7f9c6609cdbc5debc63e4a0be866932c93765312963a75881bf02b22041c2"}
Dec 05 17:45:11 crc kubenswrapper[4961]: I1205 17:45:11.795662 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz" event={"ID":"9f04f5e2-a474-427b-a466-77d789a6daa7","Type":"ContainerStarted","Data":"769ebdc9ab87f9fe970a79d6edd59746fe0eb46027cb6bac15486f561038784b"}
Dec 05 17:45:11 crc kubenswrapper[4961]: I1205 17:45:11.829509 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz" podStartSLOduration=5.242786221 podStartE2EDuration="8.829477246s" podCreationTimestamp="2025-12-05 17:45:03 +0000 UTC" firstStartedPulling="2025-12-05 17:45:05.764370247 +0000 UTC m=+711.825520720" lastFinishedPulling="2025-12-05 17:45:09.351061272 +0000 UTC m=+715.412211745" observedRunningTime="2025-12-05 17:45:11.824158895 +0000 UTC m=+717.885309368" watchObservedRunningTime="2025-12-05 17:45:11.829477246 +0000 UTC m=+717.890627749"
Dec 05 17:45:12 crc kubenswrapper[4961]: I1205 17:45:12.805465 4961 generic.go:334] "Generic (PLEG): container finished" podID="9f04f5e2-a474-427b-a466-77d789a6daa7" containerID="769ebdc9ab87f9fe970a79d6edd59746fe0eb46027cb6bac15486f561038784b" exitCode=0
Dec 05 17:45:12 crc kubenswrapper[4961]: I1205 17:45:12.805547 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz" event={"ID":"9f04f5e2-a474-427b-a466-77d789a6daa7","Type":"ContainerDied","Data":"769ebdc9ab87f9fe970a79d6edd59746fe0eb46027cb6bac15486f561038784b"}
Dec 05 17:45:14 crc kubenswrapper[4961]: I1205 17:45:14.031047 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz"
Dec 05 17:45:14 crc kubenswrapper[4961]: I1205 17:45:14.158420 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ftdvz\" (UniqueName: \"kubernetes.io/projected/9f04f5e2-a474-427b-a466-77d789a6daa7-kube-api-access-ftdvz\") pod \"9f04f5e2-a474-427b-a466-77d789a6daa7\" (UID: \"9f04f5e2-a474-427b-a466-77d789a6daa7\") "
Dec 05 17:45:14 crc kubenswrapper[4961]: I1205 17:45:14.158490 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9f04f5e2-a474-427b-a466-77d789a6daa7-util\") pod \"9f04f5e2-a474-427b-a466-77d789a6daa7\" (UID: \"9f04f5e2-a474-427b-a466-77d789a6daa7\") "
Dec 05 17:45:14 crc kubenswrapper[4961]: I1205 17:45:14.158508 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9f04f5e2-a474-427b-a466-77d789a6daa7-bundle\") pod \"9f04f5e2-a474-427b-a466-77d789a6daa7\" (UID: \"9f04f5e2-a474-427b-a466-77d789a6daa7\") "
Dec 05 17:45:14 crc kubenswrapper[4961]: I1205 17:45:14.159635 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f04f5e2-a474-427b-a466-77d789a6daa7-bundle" (OuterVolumeSpecName: "bundle") pod "9f04f5e2-a474-427b-a466-77d789a6daa7" (UID: "9f04f5e2-a474-427b-a466-77d789a6daa7"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:45:14 crc kubenswrapper[4961]: I1205 17:45:14.169189 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f04f5e2-a474-427b-a466-77d789a6daa7-kube-api-access-ftdvz" (OuterVolumeSpecName: "kube-api-access-ftdvz") pod "9f04f5e2-a474-427b-a466-77d789a6daa7" (UID: "9f04f5e2-a474-427b-a466-77d789a6daa7"). InnerVolumeSpecName "kube-api-access-ftdvz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:45:14 crc kubenswrapper[4961]: I1205 17:45:14.171026 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9f04f5e2-a474-427b-a466-77d789a6daa7-util" (OuterVolumeSpecName: "util") pod "9f04f5e2-a474-427b-a466-77d789a6daa7" (UID: "9f04f5e2-a474-427b-a466-77d789a6daa7"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:45:14 crc kubenswrapper[4961]: I1205 17:45:14.259452 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ftdvz\" (UniqueName: \"kubernetes.io/projected/9f04f5e2-a474-427b-a466-77d789a6daa7-kube-api-access-ftdvz\") on node \"crc\" DevicePath \"\""
Dec 05 17:45:14 crc kubenswrapper[4961]: I1205 17:45:14.259488 4961 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9f04f5e2-a474-427b-a466-77d789a6daa7-util\") on node \"crc\" DevicePath \"\""
Dec 05 17:45:14 crc kubenswrapper[4961]: I1205 17:45:14.259497 4961 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9f04f5e2-a474-427b-a466-77d789a6daa7-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 17:45:14 crc kubenswrapper[4961]: I1205 17:45:14.819914 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz" event={"ID":"9f04f5e2-a474-427b-a466-77d789a6daa7","Type":"ContainerDied","Data":"80ec76b74ec11ed5a1926438dcf00371dc5650babef73c24fbca92534f195549"}
Dec 05 17:45:14 crc kubenswrapper[4961]: I1205 17:45:14.819968 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="80ec76b74ec11ed5a1926438dcf00371dc5650babef73c24fbca92534f195549"
Dec 05 17:45:14 crc kubenswrapper[4961]: I1205 17:45:14.820002 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz"
Dec 05 17:45:24 crc kubenswrapper[4961]: E1205 17:45:24.124674 4961 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f04f5e2_a474_427b_a466_77d789a6daa7.slice/crio-80ec76b74ec11ed5a1926438dcf00371dc5650babef73c24fbca92534f195549\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f04f5e2_a474_427b_a466_77d789a6daa7.slice\": RecentStats: unable to find data in memory cache]"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.539714 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-67f59cc659-4mmbn"]
Dec 05 17:45:26 crc kubenswrapper[4961]: E1205 17:45:26.540005 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f04f5e2-a474-427b-a466-77d789a6daa7" containerName="extract"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.540022 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f04f5e2-a474-427b-a466-77d789a6daa7" containerName="extract"
Dec 05 17:45:26 crc kubenswrapper[4961]: E1205 17:45:26.540038 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="517806cd-8b4c-4e6c-9edf-7dbcd125e0ee" containerName="console"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.540046 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="517806cd-8b4c-4e6c-9edf-7dbcd125e0ee" containerName="console"
Dec 05 17:45:26 crc kubenswrapper[4961]: E1205 17:45:26.540059 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f04f5e2-a474-427b-a466-77d789a6daa7" containerName="pull"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.540067 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f04f5e2-a474-427b-a466-77d789a6daa7" containerName="pull"
Dec 05 17:45:26 crc kubenswrapper[4961]: E1205 17:45:26.540078 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f04f5e2-a474-427b-a466-77d789a6daa7" containerName="util"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.540086 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f04f5e2-a474-427b-a466-77d789a6daa7" containerName="util"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.540203 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f04f5e2-a474-427b-a466-77d789a6daa7" containerName="extract"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.540222 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="517806cd-8b4c-4e6c-9edf-7dbcd125e0ee" containerName="console"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.540705 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-67f59cc659-4mmbn"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.543239 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.543428 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.543427 4961 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.543549 4961 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-q22ft"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.544925 4961 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.561088 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-67f59cc659-4mmbn"]
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.621201 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1dcf69fa-3afe-4fb7-a64b-838dbab83937-webhook-cert\") pod \"metallb-operator-controller-manager-67f59cc659-4mmbn\" (UID: \"1dcf69fa-3afe-4fb7-a64b-838dbab83937\") " pod="metallb-system/metallb-operator-controller-manager-67f59cc659-4mmbn"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.621259 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5ptr7\" (UniqueName: \"kubernetes.io/projected/1dcf69fa-3afe-4fb7-a64b-838dbab83937-kube-api-access-5ptr7\") pod \"metallb-operator-controller-manager-67f59cc659-4mmbn\" (UID: \"1dcf69fa-3afe-4fb7-a64b-838dbab83937\") " pod="metallb-system/metallb-operator-controller-manager-67f59cc659-4mmbn"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.621334 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1dcf69fa-3afe-4fb7-a64b-838dbab83937-apiservice-cert\") pod \"metallb-operator-controller-manager-67f59cc659-4mmbn\" (UID: \"1dcf69fa-3afe-4fb7-a64b-838dbab83937\") " pod="metallb-system/metallb-operator-controller-manager-67f59cc659-4mmbn"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.723001 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1dcf69fa-3afe-4fb7-a64b-838dbab83937-webhook-cert\") pod \"metallb-operator-controller-manager-67f59cc659-4mmbn\" (UID: \"1dcf69fa-3afe-4fb7-a64b-838dbab83937\") " pod="metallb-system/metallb-operator-controller-manager-67f59cc659-4mmbn"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.723073 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5ptr7\" (UniqueName: \"kubernetes.io/projected/1dcf69fa-3afe-4fb7-a64b-838dbab83937-kube-api-access-5ptr7\") pod \"metallb-operator-controller-manager-67f59cc659-4mmbn\" (UID: \"1dcf69fa-3afe-4fb7-a64b-838dbab83937\") " pod="metallb-system/metallb-operator-controller-manager-67f59cc659-4mmbn"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.723126 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1dcf69fa-3afe-4fb7-a64b-838dbab83937-apiservice-cert\") pod \"metallb-operator-controller-manager-67f59cc659-4mmbn\" (UID: \"1dcf69fa-3afe-4fb7-a64b-838dbab83937\") " pod="metallb-system/metallb-operator-controller-manager-67f59cc659-4mmbn"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.731908 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1dcf69fa-3afe-4fb7-a64b-838dbab83937-apiservice-cert\") pod \"metallb-operator-controller-manager-67f59cc659-4mmbn\" (UID: \"1dcf69fa-3afe-4fb7-a64b-838dbab83937\") " pod="metallb-system/metallb-operator-controller-manager-67f59cc659-4mmbn"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.735750 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1dcf69fa-3afe-4fb7-a64b-838dbab83937-webhook-cert\") pod \"metallb-operator-controller-manager-67f59cc659-4mmbn\" (UID: \"1dcf69fa-3afe-4fb7-a64b-838dbab83937\") " pod="metallb-system/metallb-operator-controller-manager-67f59cc659-4mmbn"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.766062 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5ptr7\" (UniqueName: \"kubernetes.io/projected/1dcf69fa-3afe-4fb7-a64b-838dbab83937-kube-api-access-5ptr7\") pod \"metallb-operator-controller-manager-67f59cc659-4mmbn\" (UID: \"1dcf69fa-3afe-4fb7-a64b-838dbab83937\") " pod="metallb-system/metallb-operator-controller-manager-67f59cc659-4mmbn"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.858035 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-67f59cc659-4mmbn"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.969418 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-7c6bcfcd7d-5ng2j"]
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.970093 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-7c6bcfcd7d-5ng2j"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.972132 4961 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.974293 4961 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.974656 4961 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-8l7j4"
Dec 05 17:45:26 crc kubenswrapper[4961]: I1205 17:45:26.994380 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-7c6bcfcd7d-5ng2j"]
Dec 05 17:45:27 crc kubenswrapper[4961]: I1205 17:45:27.026563 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7rcnz\" (UniqueName: \"kubernetes.io/projected/6139f869-6350-4649-a6fd-a969bf96e18a-kube-api-access-7rcnz\") pod \"metallb-operator-webhook-server-7c6bcfcd7d-5ng2j\" (UID: \"6139f869-6350-4649-a6fd-a969bf96e18a\") " pod="metallb-system/metallb-operator-webhook-server-7c6bcfcd7d-5ng2j"
Dec 05 17:45:27 crc kubenswrapper[4961]: I1205 17:45:27.026659 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6139f869-6350-4649-a6fd-a969bf96e18a-apiservice-cert\") pod \"metallb-operator-webhook-server-7c6bcfcd7d-5ng2j\" (UID: \"6139f869-6350-4649-a6fd-a969bf96e18a\") " pod="metallb-system/metallb-operator-webhook-server-7c6bcfcd7d-5ng2j"
Dec 05 17:45:27 crc kubenswrapper[4961]: I1205 17:45:27.026699 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6139f869-6350-4649-a6fd-a969bf96e18a-webhook-cert\") pod \"metallb-operator-webhook-server-7c6bcfcd7d-5ng2j\" (UID: \"6139f869-6350-4649-a6fd-a969bf96e18a\") " pod="metallb-system/metallb-operator-webhook-server-7c6bcfcd7d-5ng2j"
Dec 05 17:45:27 crc kubenswrapper[4961]: I1205 17:45:27.129492 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6139f869-6350-4649-a6fd-a969bf96e18a-apiservice-cert\") pod \"metallb-operator-webhook-server-7c6bcfcd7d-5ng2j\" (UID: \"6139f869-6350-4649-a6fd-a969bf96e18a\") " pod="metallb-system/metallb-operator-webhook-server-7c6bcfcd7d-5ng2j"
Dec 05 17:45:27 crc kubenswrapper[4961]: I1205 17:45:27.129737 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6139f869-6350-4649-a6fd-a969bf96e18a-webhook-cert\") pod \"metallb-operator-webhook-server-7c6bcfcd7d-5ng2j\" (UID: \"6139f869-6350-4649-a6fd-a969bf96e18a\") " pod="metallb-system/metallb-operator-webhook-server-7c6bcfcd7d-5ng2j"
Dec 05 17:45:27 crc kubenswrapper[4961]: I1205 17:45:27.129832 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7rcnz\" (UniqueName: \"kubernetes.io/projected/6139f869-6350-4649-a6fd-a969bf96e18a-kube-api-access-7rcnz\") pod \"metallb-operator-webhook-server-7c6bcfcd7d-5ng2j\" (UID: \"6139f869-6350-4649-a6fd-a969bf96e18a\") " pod="metallb-system/metallb-operator-webhook-server-7c6bcfcd7d-5ng2j"
Dec 05 17:45:27 crc kubenswrapper[4961]: I1205 17:45:27.134344 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6139f869-6350-4649-a6fd-a969bf96e18a-webhook-cert\") pod \"metallb-operator-webhook-server-7c6bcfcd7d-5ng2j\" (UID: \"6139f869-6350-4649-a6fd-a969bf96e18a\") " pod="metallb-system/metallb-operator-webhook-server-7c6bcfcd7d-5ng2j"
Dec 05 17:45:27 crc kubenswrapper[4961]: I1205 17:45:27.136676 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6139f869-6350-4649-a6fd-a969bf96e18a-apiservice-cert\") pod \"metallb-operator-webhook-server-7c6bcfcd7d-5ng2j\" (UID: \"6139f869-6350-4649-a6fd-a969bf96e18a\") " pod="metallb-system/metallb-operator-webhook-server-7c6bcfcd7d-5ng2j"
Dec 05 17:45:27 crc kubenswrapper[4961]: I1205 17:45:27.145533 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7rcnz\" (UniqueName: \"kubernetes.io/projected/6139f869-6350-4649-a6fd-a969bf96e18a-kube-api-access-7rcnz\") pod \"metallb-operator-webhook-server-7c6bcfcd7d-5ng2j\" (UID: \"6139f869-6350-4649-a6fd-a969bf96e18a\") " pod="metallb-system/metallb-operator-webhook-server-7c6bcfcd7d-5ng2j"
Dec 05 17:45:27 crc kubenswrapper[4961]: I1205 17:45:27.201624 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-67f59cc659-4mmbn"]
Dec 05 17:45:27 crc kubenswrapper[4961]: W1205 17:45:27.211305 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod1dcf69fa_3afe_4fb7_a64b_838dbab83937.slice/crio-3f757454d9597742ef5b5230a9b1e1f024fddd974d0200eb4ed3d880cd8c6a66 WatchSource:0}: Error finding container 3f757454d9597742ef5b5230a9b1e1f024fddd974d0200eb4ed3d880cd8c6a66: Status 404 returned error can't find the container with id 3f757454d9597742ef5b5230a9b1e1f024fddd974d0200eb4ed3d880cd8c6a66
Dec 05 17:45:27 crc kubenswrapper[4961]: I1205 17:45:27.284754 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-7c6bcfcd7d-5ng2j"
Dec 05 17:45:27 crc kubenswrapper[4961]: I1205 17:45:27.680909 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-7c6bcfcd7d-5ng2j"]
Dec 05 17:45:27 crc kubenswrapper[4961]: W1205 17:45:27.682054 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod6139f869_6350_4649_a6fd_a969bf96e18a.slice/crio-86c6c39cc6af6a638053dd0a2a510c4bdc472112c647dadcaf057283a620a1dd WatchSource:0}: Error finding container 86c6c39cc6af6a638053dd0a2a510c4bdc472112c647dadcaf057283a620a1dd: Status 404 returned error can't find the container with id 86c6c39cc6af6a638053dd0a2a510c4bdc472112c647dadcaf057283a620a1dd
Dec 05 17:45:27 crc kubenswrapper[4961]: I1205 17:45:27.913114 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-7c6bcfcd7d-5ng2j" event={"ID":"6139f869-6350-4649-a6fd-a969bf96e18a","Type":"ContainerStarted","Data":"86c6c39cc6af6a638053dd0a2a510c4bdc472112c647dadcaf057283a620a1dd"}
Dec 05 17:45:27 crc kubenswrapper[4961]: I1205 17:45:27.914481 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-67f59cc659-4mmbn" event={"ID":"1dcf69fa-3afe-4fb7-a64b-838dbab83937","Type":"ContainerStarted","Data":"3f757454d9597742ef5b5230a9b1e1f024fddd974d0200eb4ed3d880cd8c6a66"}
Dec 05 17:45:33 crc kubenswrapper[4961]: I1205 17:45:33.993458 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-67f59cc659-4mmbn" event={"ID":"1dcf69fa-3afe-4fb7-a64b-838dbab83937","Type":"ContainerStarted","Data":"fd7763275097d06ea5f1d8721772e2a0545e63f510d88b17656fd1da335adc07"}
Dec 05 17:45:33 crc kubenswrapper[4961]: I1205 17:45:33.994046 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-67f59cc659-4mmbn"
Dec 05 17:45:33 crc kubenswrapper[4961]: I1205 17:45:33.996012 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-7c6bcfcd7d-5ng2j" event={"ID":"6139f869-6350-4649-a6fd-a969bf96e18a","Type":"ContainerStarted","Data":"e1af4f6d2834331a200e6bdc5eeacc27f921f3f2a1867fa0e41f731384bafe5c"}
Dec 05 17:45:33 crc kubenswrapper[4961]: I1205 17:45:33.996251 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-7c6bcfcd7d-5ng2j"
Dec 05 17:45:34 crc kubenswrapper[4961]: I1205 17:45:34.018820 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-67f59cc659-4mmbn" podStartSLOduration=1.864999191 podStartE2EDuration="8.018804s" podCreationTimestamp="2025-12-05 17:45:26 +0000 UTC" firstStartedPulling="2025-12-05 17:45:27.218222095 +0000 UTC m=+733.279372568" lastFinishedPulling="2025-12-05 17:45:33.372026904 +0000 UTC m=+739.433177377" observedRunningTime="2025-12-05 17:45:34.014821822 +0000 UTC m=+740.075972315" watchObservedRunningTime="2025-12-05 17:45:34.018804 +0000 UTC m=+740.079954463"
Dec 05 17:45:34 crc kubenswrapper[4961]: I1205 17:45:34.036548 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-7c6bcfcd7d-5ng2j" podStartSLOduration=2.279891683 podStartE2EDuration="8.036521706s" podCreationTimestamp="2025-12-05 17:45:26 +0000 UTC" firstStartedPulling="2025-12-05 17:45:27.684538964 +0000 UTC m=+733.745689437" lastFinishedPulling="2025-12-05 17:45:33.441168977 +0000 UTC m=+739.502319460" observedRunningTime="2025-12-05 17:45:34.032845246 +0000 UTC m=+740.093995739" watchObservedRunningTime="2025-12-05 17:45:34.036521706 +0000 UTC m=+740.097672189"
Dec 05 17:45:34 crc kubenswrapper[4961]: E1205 17:45:34.267655 4961 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f04f5e2_a474_427b_a466_77d789a6daa7.slice/crio-80ec76b74ec11ed5a1926438dcf00371dc5650babef73c24fbca92534f195549\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f04f5e2_a474_427b_a466_77d789a6daa7.slice\": RecentStats: unable to find data in memory cache]"
Dec 05 17:45:44 crc kubenswrapper[4961]: E1205 17:45:44.420091 4961 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f04f5e2_a474_427b_a466_77d789a6daa7.slice/crio-80ec76b74ec11ed5a1926438dcf00371dc5650babef73c24fbca92534f195549\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f04f5e2_a474_427b_a466_77d789a6daa7.slice\": RecentStats: unable to find data in memory cache]"
Dec 05 17:45:47 crc kubenswrapper[4961]: I1205 17:45:47.288982 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-7c6bcfcd7d-5ng2j"
Dec 05 17:45:50 crc kubenswrapper[4961]: I1205 17:45:50.563742 4961 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Dec 05 17:45:54 crc kubenswrapper[4961]: E1205 17:45:54.557034 4961 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f04f5e2_a474_427b_a466_77d789a6daa7.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f04f5e2_a474_427b_a466_77d789a6daa7.slice/crio-80ec76b74ec11ed5a1926438dcf00371dc5650babef73c24fbca92534f195549\": RecentStats: unable to find data in memory cache]"
Dec 05 17:46:04 crc kubenswrapper[4961]: E1205 17:46:04.688642 4961 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f04f5e2_a474_427b_a466_77d789a6daa7.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f04f5e2_a474_427b_a466_77d789a6daa7.slice/crio-80ec76b74ec11ed5a1926438dcf00371dc5650babef73c24fbca92534f195549\": RecentStats: unable to find data in memory cache]"
Dec 05 17:46:06 crc kubenswrapper[4961]: I1205 17:46:06.860610 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-67f59cc659-4mmbn"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.662156 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-bfzd4"]
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.665235 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-bfzd4"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.668035 4961 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-7xpcc"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.671970 4961 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.680324 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.700075 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-gsnw2"]
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.700878 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gsnw2"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.708113 4961 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.722065 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/17678ad4-8645-4f4f-a752-c1f92d4610f6-frr-startup\") pod \"frr-k8s-bfzd4\" (UID: \"17678ad4-8645-4f4f-a752-c1f92d4610f6\") " pod="metallb-system/frr-k8s-bfzd4"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.722123 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/17678ad4-8645-4f4f-a752-c1f92d4610f6-metrics\") pod \"frr-k8s-bfzd4\" (UID: \"17678ad4-8645-4f4f-a752-c1f92d4610f6\") " pod="metallb-system/frr-k8s-bfzd4"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.722155 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/17678ad4-8645-4f4f-a752-c1f92d4610f6-frr-sockets\") pod \"frr-k8s-bfzd4\" (UID: \"17678ad4-8645-4f4f-a752-c1f92d4610f6\") " pod="metallb-system/frr-k8s-bfzd4"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.722190 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/17678ad4-8645-4f4f-a752-c1f92d4610f6-reloader\") pod \"frr-k8s-bfzd4\" (UID: \"17678ad4-8645-4f4f-a752-c1f92d4610f6\") " pod="metallb-system/frr-k8s-bfzd4"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.722204 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-gsnw2"]
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.722237 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/17678ad4-8645-4f4f-a752-c1f92d4610f6-metrics-certs\") pod \"frr-k8s-bfzd4\" (UID: \"17678ad4-8645-4f4f-a752-c1f92d4610f6\") " pod="metallb-system/frr-k8s-bfzd4"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.722274 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/17678ad4-8645-4f4f-a752-c1f92d4610f6-frr-conf\") pod \"frr-k8s-bfzd4\" (UID: \"17678ad4-8645-4f4f-a752-c1f92d4610f6\") " pod="metallb-system/frr-k8s-bfzd4"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.722298 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l74sc\" (UniqueName: \"kubernetes.io/projected/17678ad4-8645-4f4f-a752-c1f92d4610f6-kube-api-access-l74sc\") pod \"frr-k8s-bfzd4\" (UID: \"17678ad4-8645-4f4f-a752-c1f92d4610f6\") " pod="metallb-system/frr-k8s-bfzd4"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.805927 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-pcwhx"]
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.807072 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-pcwhx"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.808831 4961 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.809133 4961 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.809305 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.809561 4961 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-smzgt"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.823106 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-f8648f98b-k6nwk"]
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.823165 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/17678ad4-8645-4f4f-a752-c1f92d4610f6-metrics-certs\") pod \"frr-k8s-bfzd4\" (UID: \"17678ad4-8645-4f4f-a752-c1f92d4610f6\") " pod="metallb-system/frr-k8s-bfzd4"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.823224 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/17678ad4-8645-4f4f-a752-c1f92d4610f6-frr-conf\") pod \"frr-k8s-bfzd4\" (UID: \"17678ad4-8645-4f4f-a752-c1f92d4610f6\") " pod="metallb-system/frr-k8s-bfzd4"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.823245 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l74sc\" (UniqueName: \"kubernetes.io/projected/17678ad4-8645-4f4f-a752-c1f92d4610f6-kube-api-access-l74sc\") pod \"frr-k8s-bfzd4\" (UID: \"17678ad4-8645-4f4f-a752-c1f92d4610f6\") " pod="metallb-system/frr-k8s-bfzd4"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.823284 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/17678ad4-8645-4f4f-a752-c1f92d4610f6-frr-startup\") pod \"frr-k8s-bfzd4\" (UID: \"17678ad4-8645-4f4f-a752-c1f92d4610f6\") " pod="metallb-system/frr-k8s-bfzd4"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.823323 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mb6hb\" (UniqueName: \"kubernetes.io/projected/013262fd-338d-4a14-89f8-d682d09916f2-kube-api-access-mb6hb\") pod \"frr-k8s-webhook-server-7fcb986d4-gsnw2\" (UID: \"013262fd-338d-4a14-89f8-d682d09916f2\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gsnw2"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.823354 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/17678ad4-8645-4f4f-a752-c1f92d4610f6-metrics\") pod \"frr-k8s-bfzd4\" (UID: \"17678ad4-8645-4f4f-a752-c1f92d4610f6\") " pod="metallb-system/frr-k8s-bfzd4"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.823385 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/17678ad4-8645-4f4f-a752-c1f92d4610f6-frr-sockets\") pod \"frr-k8s-bfzd4\" (UID: \"17678ad4-8645-4f4f-a752-c1f92d4610f6\") " pod="metallb-system/frr-k8s-bfzd4"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.823411 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/013262fd-338d-4a14-89f8-d682d09916f2-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-gsnw2\" (UID: \"013262fd-338d-4a14-89f8-d682d09916f2\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gsnw2"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.823434 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/17678ad4-8645-4f4f-a752-c1f92d4610f6-reloader\") pod \"frr-k8s-bfzd4\" (UID: \"17678ad4-8645-4f4f-a752-c1f92d4610f6\") " pod="metallb-system/frr-k8s-bfzd4"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.823963 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/17678ad4-8645-4f4f-a752-c1f92d4610f6-reloader\") pod \"frr-k8s-bfzd4\" (UID: \"17678ad4-8645-4f4f-a752-c1f92d4610f6\") " pod="metallb-system/frr-k8s-bfzd4"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.824229 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/17678ad4-8645-4f4f-a752-c1f92d4610f6-metrics\") pod \"frr-k8s-bfzd4\" (UID: \"17678ad4-8645-4f4f-a752-c1f92d4610f6\") " pod="metallb-system/frr-k8s-bfzd4"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.824465 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/17678ad4-8645-4f4f-a752-c1f92d4610f6-frr-sockets\") pod \"frr-k8s-bfzd4\" (UID: \"17678ad4-8645-4f4f-a752-c1f92d4610f6\") " pod="metallb-system/frr-k8s-bfzd4"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.824726 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/17678ad4-8645-4f4f-a752-c1f92d4610f6-frr-conf\") pod \"frr-k8s-bfzd4\" (UID: \"17678ad4-8645-4f4f-a752-c1f92d4610f6\") " pod="metallb-system/frr-k8s-bfzd4"
Dec 05 17:46:07 crc kubenswrapper[4961]: E1205 17:46:07.824833 4961 secret.go:188] Couldn't get secret metallb-system/frr-k8s-certs-secret: secret "frr-k8s-certs-secret" not found
Dec 05 17:46:07 crc kubenswrapper[4961]: E1205 17:46:07.824884 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/17678ad4-8645-4f4f-a752-c1f92d4610f6-metrics-certs podName:17678ad4-8645-4f4f-a752-c1f92d4610f6 nodeName:}" failed. No retries permitted until 2025-12-05 17:46:08.324865113 +0000 UTC m=+774.386015586 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/17678ad4-8645-4f4f-a752-c1f92d4610f6-metrics-certs") pod "frr-k8s-bfzd4" (UID: "17678ad4-8645-4f4f-a752-c1f92d4610f6") : secret "frr-k8s-certs-secret" not found
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.825045 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/17678ad4-8645-4f4f-a752-c1f92d4610f6-frr-startup\") pod \"frr-k8s-bfzd4\" (UID: \"17678ad4-8645-4f4f-a752-c1f92d4610f6\") " pod="metallb-system/frr-k8s-bfzd4"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.827053 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-f8648f98b-k6nwk"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.830141 4961 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.839460 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-k6nwk"]
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.849567 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l74sc\" (UniqueName: \"kubernetes.io/projected/17678ad4-8645-4f4f-a752-c1f92d4610f6-kube-api-access-l74sc\") pod \"frr-k8s-bfzd4\" (UID: \"17678ad4-8645-4f4f-a752-c1f92d4610f6\") " pod="metallb-system/frr-k8s-bfzd4"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.924995 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/013262fd-338d-4a14-89f8-d682d09916f2-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-gsnw2\" (UID: \"013262fd-338d-4a14-89f8-d682d09916f2\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gsnw2"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.925384 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xs55\" (UniqueName: \"kubernetes.io/projected/b1a0921b-0a54-4163-8931-0b6ef9dd1051-kube-api-access-9xs55\") pod \"speaker-pcwhx\" (UID: \"b1a0921b-0a54-4163-8931-0b6ef9dd1051\") " pod="metallb-system/speaker-pcwhx"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.925425 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b1a0921b-0a54-4163-8931-0b6ef9dd1051-metrics-certs\") pod \"speaker-pcwhx\" (UID: \"b1a0921b-0a54-4163-8931-0b6ef9dd1051\") " pod="metallb-system/speaker-pcwhx"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.925447 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3f6c280d-42e8-40a5-b11c-e9a80f81125b-cert\") pod \"controller-f8648f98b-k6nwk\" (UID: \"3f6c280d-42e8-40a5-b11c-e9a80f81125b\") " pod="metallb-system/controller-f8648f98b-k6nwk"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.925506 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3f6c280d-42e8-40a5-b11c-e9a80f81125b-metrics-certs\") pod \"controller-f8648f98b-k6nwk\" (UID: \"3f6c280d-42e8-40a5-b11c-e9a80f81125b\") " pod="metallb-system/controller-f8648f98b-k6nwk"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.925546 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mb6hb\" (UniqueName: \"kubernetes.io/projected/013262fd-338d-4a14-89f8-d682d09916f2-kube-api-access-mb6hb\") pod \"frr-k8s-webhook-server-7fcb986d4-gsnw2\" (UID: \"013262fd-338d-4a14-89f8-d682d09916f2\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gsnw2"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.925565 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b1a0921b-0a54-4163-8931-0b6ef9dd1051-memberlist\") pod \"speaker-pcwhx\" (UID: \"b1a0921b-0a54-4163-8931-0b6ef9dd1051\") " pod="metallb-system/speaker-pcwhx"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.925583 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/b1a0921b-0a54-4163-8931-0b6ef9dd1051-metallb-excludel2\") pod \"speaker-pcwhx\" (UID: \"b1a0921b-0a54-4163-8931-0b6ef9dd1051\") " pod="metallb-system/speaker-pcwhx"
Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.925605 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8g8h2\" (UniqueName: \"kubernetes.io/projected/3f6c280d-42e8-40a5-b11c-e9a80f81125b-kube-api-access-8g8h2\") pod \"controller-f8648f98b-k6nwk\" (UID: \"3f6c280d-42e8-40a5-b11c-e9a80f81125b\") " pod="metallb-system/controller-f8648f98b-k6nwk"
Dec 05 17:46:07 crc kubenswrapper[4961]: E1205 17:46:07.925189 4961 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found
Dec 05 17:46:07 crc kubenswrapper[4961]: E1205 17:46:07.925863 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/013262fd-338d-4a14-89f8-d682d09916f2-cert podName:013262fd-338d-4a14-89f8-d682d09916f2 nodeName:}" failed. No retries permitted until 2025-12-05 17:46:08.42583912 +0000 UTC m=+774.486989683 (durationBeforeRetry 500ms).
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/013262fd-338d-4a14-89f8-d682d09916f2-cert") pod "frr-k8s-webhook-server-7fcb986d4-gsnw2" (UID: "013262fd-338d-4a14-89f8-d682d09916f2") : secret "frr-k8s-webhook-server-cert" not found Dec 05 17:46:07 crc kubenswrapper[4961]: I1205 17:46:07.945549 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mb6hb\" (UniqueName: \"kubernetes.io/projected/013262fd-338d-4a14-89f8-d682d09916f2-kube-api-access-mb6hb\") pod \"frr-k8s-webhook-server-7fcb986d4-gsnw2\" (UID: \"013262fd-338d-4a14-89f8-d682d09916f2\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gsnw2" Dec 05 17:46:08 crc kubenswrapper[4961]: I1205 17:46:08.027353 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3f6c280d-42e8-40a5-b11c-e9a80f81125b-metrics-certs\") pod \"controller-f8648f98b-k6nwk\" (UID: \"3f6c280d-42e8-40a5-b11c-e9a80f81125b\") " pod="metallb-system/controller-f8648f98b-k6nwk" Dec 05 17:46:08 crc kubenswrapper[4961]: I1205 17:46:08.027688 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b1a0921b-0a54-4163-8931-0b6ef9dd1051-memberlist\") pod \"speaker-pcwhx\" (UID: \"b1a0921b-0a54-4163-8931-0b6ef9dd1051\") " pod="metallb-system/speaker-pcwhx" Dec 05 17:46:08 crc kubenswrapper[4961]: I1205 17:46:08.027800 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/b1a0921b-0a54-4163-8931-0b6ef9dd1051-metallb-excludel2\") pod \"speaker-pcwhx\" (UID: \"b1a0921b-0a54-4163-8931-0b6ef9dd1051\") " pod="metallb-system/speaker-pcwhx" Dec 05 17:46:08 crc kubenswrapper[4961]: E1205 17:46:08.027876 4961 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 05 17:46:08 crc kubenswrapper[4961]: I1205 17:46:08.027962 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8g8h2\" (UniqueName: \"kubernetes.io/projected/3f6c280d-42e8-40a5-b11c-e9a80f81125b-kube-api-access-8g8h2\") pod \"controller-f8648f98b-k6nwk\" (UID: \"3f6c280d-42e8-40a5-b11c-e9a80f81125b\") " pod="metallb-system/controller-f8648f98b-k6nwk" Dec 05 17:46:08 crc kubenswrapper[4961]: I1205 17:46:08.028120 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xs55\" (UniqueName: \"kubernetes.io/projected/b1a0921b-0a54-4163-8931-0b6ef9dd1051-kube-api-access-9xs55\") pod \"speaker-pcwhx\" (UID: \"b1a0921b-0a54-4163-8931-0b6ef9dd1051\") " pod="metallb-system/speaker-pcwhx" Dec 05 17:46:08 crc kubenswrapper[4961]: I1205 17:46:08.028238 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b1a0921b-0a54-4163-8931-0b6ef9dd1051-metrics-certs\") pod \"speaker-pcwhx\" (UID: \"b1a0921b-0a54-4163-8931-0b6ef9dd1051\") " pod="metallb-system/speaker-pcwhx" Dec 05 17:46:08 crc kubenswrapper[4961]: I1205 17:46:08.028336 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3f6c280d-42e8-40a5-b11c-e9a80f81125b-cert\") pod \"controller-f8648f98b-k6nwk\" (UID: \"3f6c280d-42e8-40a5-b11c-e9a80f81125b\") " pod="metallb-system/controller-f8648f98b-k6nwk" Dec 05 17:46:08 crc kubenswrapper[4961]: E1205 17:46:08.028522 4961 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b1a0921b-0a54-4163-8931-0b6ef9dd1051-memberlist podName:b1a0921b-0a54-4163-8931-0b6ef9dd1051 nodeName:}" failed. No retries permitted until 2025-12-05 17:46:08.52850393 +0000 UTC m=+774.589654403 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/b1a0921b-0a54-4163-8931-0b6ef9dd1051-memberlist") pod "speaker-pcwhx" (UID: "b1a0921b-0a54-4163-8931-0b6ef9dd1051") : secret "metallb-memberlist" not found Dec 05 17:46:08 crc kubenswrapper[4961]: I1205 17:46:08.028863 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/b1a0921b-0a54-4163-8931-0b6ef9dd1051-metallb-excludel2\") pod \"speaker-pcwhx\" (UID: \"b1a0921b-0a54-4163-8931-0b6ef9dd1051\") " pod="metallb-system/speaker-pcwhx" Dec 05 17:46:08 crc kubenswrapper[4961]: I1205 17:46:08.033256 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/b1a0921b-0a54-4163-8931-0b6ef9dd1051-metrics-certs\") pod \"speaker-pcwhx\" (UID: \"b1a0921b-0a54-4163-8931-0b6ef9dd1051\") " pod="metallb-system/speaker-pcwhx" Dec 05 17:46:08 crc kubenswrapper[4961]: I1205 17:46:08.033479 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/3f6c280d-42e8-40a5-b11c-e9a80f81125b-cert\") pod \"controller-f8648f98b-k6nwk\" (UID: \"3f6c280d-42e8-40a5-b11c-e9a80f81125b\") " pod="metallb-system/controller-f8648f98b-k6nwk" Dec 05 17:46:08 crc kubenswrapper[4961]: I1205 17:46:08.033478 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3f6c280d-42e8-40a5-b11c-e9a80f81125b-metrics-certs\") pod \"controller-f8648f98b-k6nwk\" (UID: \"3f6c280d-42e8-40a5-b11c-e9a80f81125b\") " pod="metallb-system/controller-f8648f98b-k6nwk" Dec 05 17:46:08 crc kubenswrapper[4961]: I1205 17:46:08.050123 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8g8h2\" (UniqueName: \"kubernetes.io/projected/3f6c280d-42e8-40a5-b11c-e9a80f81125b-kube-api-access-8g8h2\") pod \"controller-f8648f98b-k6nwk\" (UID: \"3f6c280d-42e8-40a5-b11c-e9a80f81125b\") " pod="metallb-system/controller-f8648f98b-k6nwk" Dec 05 17:46:08 crc kubenswrapper[4961]: I1205 17:46:08.051525 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xs55\" (UniqueName: \"kubernetes.io/projected/b1a0921b-0a54-4163-8931-0b6ef9dd1051-kube-api-access-9xs55\") pod \"speaker-pcwhx\" (UID: \"b1a0921b-0a54-4163-8931-0b6ef9dd1051\") " pod="metallb-system/speaker-pcwhx" Dec 05 17:46:08 crc kubenswrapper[4961]: I1205 17:46:08.142577 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-f8648f98b-k6nwk" Dec 05 17:46:08 crc kubenswrapper[4961]: I1205 17:46:08.332679 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/17678ad4-8645-4f4f-a752-c1f92d4610f6-metrics-certs\") pod \"frr-k8s-bfzd4\" (UID: \"17678ad4-8645-4f4f-a752-c1f92d4610f6\") " pod="metallb-system/frr-k8s-bfzd4" Dec 05 17:46:08 crc kubenswrapper[4961]: I1205 17:46:08.336620 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/17678ad4-8645-4f4f-a752-c1f92d4610f6-metrics-certs\") pod \"frr-k8s-bfzd4\" (UID: \"17678ad4-8645-4f4f-a752-c1f92d4610f6\") " pod="metallb-system/frr-k8s-bfzd4" Dec 05 17:46:08 crc kubenswrapper[4961]: I1205 17:46:08.434783 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/013262fd-338d-4a14-89f8-d682d09916f2-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-gsnw2\" (UID: \"013262fd-338d-4a14-89f8-d682d09916f2\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gsnw2" Dec 05 17:46:08 crc kubenswrapper[4961]: I1205 17:46:08.438754 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/013262fd-338d-4a14-89f8-d682d09916f2-cert\") pod \"frr-k8s-webhook-server-7fcb986d4-gsnw2\" (UID: \"013262fd-338d-4a14-89f8-d682d09916f2\") " pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gsnw2" Dec 05 17:46:08 crc kubenswrapper[4961]: I1205 17:46:08.535865 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b1a0921b-0a54-4163-8931-0b6ef9dd1051-memberlist\") pod \"speaker-pcwhx\" (UID: \"b1a0921b-0a54-4163-8931-0b6ef9dd1051\") " pod="metallb-system/speaker-pcwhx" Dec 05 17:46:08 crc kubenswrapper[4961]: E1205 17:46:08.536082 4961 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Dec 05 17:46:08 crc kubenswrapper[4961]: E1205 17:46:08.536200 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/b1a0921b-0a54-4163-8931-0b6ef9dd1051-memberlist podName:b1a0921b-0a54-4163-8931-0b6ef9dd1051 nodeName:}" failed. No retries permitted until 2025-12-05 17:46:09.536172778 +0000 UTC m=+775.597323251 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/b1a0921b-0a54-4163-8931-0b6ef9dd1051-memberlist") pod "speaker-pcwhx" (UID: "b1a0921b-0a54-4163-8931-0b6ef9dd1051") : secret "metallb-memberlist" not found Dec 05 17:46:08 crc kubenswrapper[4961]: I1205 17:46:08.586435 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-bfzd4" Dec 05 17:46:08 crc kubenswrapper[4961]: I1205 17:46:08.619137 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gsnw2" Dec 05 17:46:08 crc kubenswrapper[4961]: I1205 17:46:08.638166 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-f8648f98b-k6nwk"] Dec 05 17:46:09 crc kubenswrapper[4961]: I1205 17:46:09.047222 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7fcb986d4-gsnw2"] Dec 05 17:46:09 crc kubenswrapper[4961]: I1205 17:46:09.202656 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bfzd4" event={"ID":"17678ad4-8645-4f4f-a752-c1f92d4610f6","Type":"ContainerStarted","Data":"452977cdc5b0d5b06b624a93539cd8b941810b4f0b6ee2e73380fab2a7ad9be4"} Dec 05 17:46:09 crc kubenswrapper[4961]: I1205 17:46:09.204577 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-k6nwk" event={"ID":"3f6c280d-42e8-40a5-b11c-e9a80f81125b","Type":"ContainerStarted","Data":"31837a1298ffb075857e76dfb3eea2bb7a31cac8b0d3f94f19a3bf673f83a664"} Dec 05 17:46:09 crc kubenswrapper[4961]: I1205 17:46:09.204635 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-k6nwk" event={"ID":"3f6c280d-42e8-40a5-b11c-e9a80f81125b","Type":"ContainerStarted","Data":"8c96941f52b999a2de5e1cc4278ca3e8e2fd6c07cc129bfda5cd8d4d1a359524"} Dec 05 17:46:09 crc kubenswrapper[4961]: I1205 17:46:09.204648 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-f8648f98b-k6nwk" event={"ID":"3f6c280d-42e8-40a5-b11c-e9a80f81125b","Type":"ContainerStarted","Data":"cfdb94ad7de3447cd424fb934bb993d4efbf3013d051bf82c1e215fa5b69fc93"} Dec 05 17:46:09 crc kubenswrapper[4961]: I1205 17:46:09.204721 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-f8648f98b-k6nwk" Dec 05 17:46:09 crc kubenswrapper[4961]: I1205 17:46:09.206006 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gsnw2" event={"ID":"013262fd-338d-4a14-89f8-d682d09916f2","Type":"ContainerStarted","Data":"1d706c3285d564bb6b536c73287c476770b2be7d9aaaa0e9bd9d63b0f5282866"} Dec 05 17:46:09 crc kubenswrapper[4961]: I1205 17:46:09.223139 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-f8648f98b-k6nwk" podStartSLOduration=2.2231155830000002 podStartE2EDuration="2.223115583s" podCreationTimestamp="2025-12-05 17:46:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:46:09.221500453 +0000 UTC m=+775.282650936" watchObservedRunningTime="2025-12-05 17:46:09.223115583 +0000 UTC m=+775.284266056" Dec 05 17:46:09 crc kubenswrapper[4961]: I1205 17:46:09.549921 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b1a0921b-0a54-4163-8931-0b6ef9dd1051-memberlist\") pod \"speaker-pcwhx\" (UID: \"b1a0921b-0a54-4163-8931-0b6ef9dd1051\") " pod="metallb-system/speaker-pcwhx" Dec 05 17:46:09 crc kubenswrapper[4961]: I1205 17:46:09.556203 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/b1a0921b-0a54-4163-8931-0b6ef9dd1051-memberlist\") pod \"speaker-pcwhx\" (UID: \"b1a0921b-0a54-4163-8931-0b6ef9dd1051\") " pod="metallb-system/speaker-pcwhx" Dec 05 17:46:09 crc kubenswrapper[4961]: I1205 17:46:09.623163 4961 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-pcwhx" Dec 05 17:46:09 crc kubenswrapper[4961]: W1205 17:46:09.644805 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb1a0921b_0a54_4163_8931_0b6ef9dd1051.slice/crio-10b8c12e42aa5ce9d8880d45a0d67d25b035bb2cf0d58e4f8f7f0510d2039211 WatchSource:0}: Error finding container 10b8c12e42aa5ce9d8880d45a0d67d25b035bb2cf0d58e4f8f7f0510d2039211: Status 404 returned error can't find the container with id 10b8c12e42aa5ce9d8880d45a0d67d25b035bb2cf0d58e4f8f7f0510d2039211 Dec 05 17:46:10 crc kubenswrapper[4961]: I1205 17:46:10.226822 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-pcwhx" event={"ID":"b1a0921b-0a54-4163-8931-0b6ef9dd1051","Type":"ContainerStarted","Data":"ca820e2bbf8e9dfa65c492c17113465cc962ebda2567f984c4f9c168a2cb1547"} Dec 05 17:46:10 crc kubenswrapper[4961]: I1205 17:46:10.227181 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-pcwhx" event={"ID":"b1a0921b-0a54-4163-8931-0b6ef9dd1051","Type":"ContainerStarted","Data":"fe6b8c49dd47e97662ef2a01d7d1474d41fdff8f6981ef44b5c1b806f9e19c18"} Dec 05 17:46:10 crc kubenswrapper[4961]: I1205 17:46:10.227198 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-pcwhx" event={"ID":"b1a0921b-0a54-4163-8931-0b6ef9dd1051","Type":"ContainerStarted","Data":"10b8c12e42aa5ce9d8880d45a0d67d25b035bb2cf0d58e4f8f7f0510d2039211"} Dec 05 17:46:10 crc kubenswrapper[4961]: I1205 17:46:10.227806 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-pcwhx" Dec 05 17:46:10 crc kubenswrapper[4961]: I1205 17:46:10.255852 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-pcwhx" podStartSLOduration=3.255757186 podStartE2EDuration="3.255757186s" podCreationTimestamp="2025-12-05 17:46:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:46:10.245738959 +0000 UTC m=+776.306889452" watchObservedRunningTime="2025-12-05 17:46:10.255757186 +0000 UTC m=+776.316907659" Dec 05 17:46:14 crc kubenswrapper[4961]: E1205 17:46:14.862550 4961 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f04f5e2_a474_427b_a466_77d789a6daa7.slice/crio-80ec76b74ec11ed5a1926438dcf00371dc5650babef73c24fbca92534f195549\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9f04f5e2_a474_427b_a466_77d789a6daa7.slice\": RecentStats: unable to find data in memory cache]" Dec 05 17:46:18 crc kubenswrapper[4961]: I1205 17:46:18.149035 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-f8648f98b-k6nwk" Dec 05 17:46:19 crc kubenswrapper[4961]: I1205 17:46:19.285812 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gsnw2" event={"ID":"013262fd-338d-4a14-89f8-d682d09916f2","Type":"ContainerStarted","Data":"85050cd6f9af472a1c99a90ab93b12b58d61c34977de6342bb9d998a6cefc866"} Dec 05 17:46:19 crc kubenswrapper[4961]: I1205 17:46:19.286693 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gsnw2" Dec 05 17:46:19 crc kubenswrapper[4961]: I1205 17:46:19.287545 4961 generic.go:334] "Generic (PLEG): container finished" podID="17678ad4-8645-4f4f-a752-c1f92d4610f6" containerID="50838fe8b920b90bd8b0938ef90cbab7930da906b78a0c3cd0871a49b140e21d" exitCode=0 Dec 05 17:46:19 crc kubenswrapper[4961]: I1205 17:46:19.287588 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bfzd4" event={"ID":"17678ad4-8645-4f4f-a752-c1f92d4610f6","Type":"ContainerDied","Data":"50838fe8b920b90bd8b0938ef90cbab7930da906b78a0c3cd0871a49b140e21d"} Dec 05 17:46:19 crc kubenswrapper[4961]: I1205 17:46:19.327319 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gsnw2" podStartSLOduration=3.051410976 podStartE2EDuration="12.327301475s" podCreationTimestamp="2025-12-05 17:46:07 +0000 UTC" firstStartedPulling="2025-12-05 17:46:09.055830171 +0000 UTC m=+775.116980644" lastFinishedPulling="2025-12-05 17:46:18.33172067 +0000 UTC m=+784.392871143" observedRunningTime="2025-12-05 17:46:19.306987684 +0000 UTC m=+785.368138157" watchObservedRunningTime="2025-12-05 17:46:19.327301475 +0000 UTC m=+785.388451948" Dec 05 17:46:19 crc kubenswrapper[4961]: I1205 17:46:19.628990 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-pcwhx" Dec 05 17:46:20 crc kubenswrapper[4961]: I1205 17:46:20.293865 4961 generic.go:334] "Generic (PLEG): container finished" podID="17678ad4-8645-4f4f-a752-c1f92d4610f6" containerID="8a6e2a9d7e5d708be17c879f4fdc72b812b4610fdc4cc45416f912c66b23ac57" exitCode=0 Dec 05 17:46:20 crc kubenswrapper[4961]: I1205 17:46:20.293969 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bfzd4" event={"ID":"17678ad4-8645-4f4f-a752-c1f92d4610f6","Type":"ContainerDied","Data":"8a6e2a9d7e5d708be17c879f4fdc72b812b4610fdc4cc45416f912c66b23ac57"} Dec 05 17:46:21 crc kubenswrapper[4961]: I1205 17:46:21.300718 4961 generic.go:334] "Generic (PLEG): container finished" podID="17678ad4-8645-4f4f-a752-c1f92d4610f6" containerID="0334530d083d73b22f4e9d92cb0b24a24960778f248c46a13d6ec10e0aaf1a38" exitCode=0 Dec 05 17:46:21 crc kubenswrapper[4961]: I1205 17:46:21.300818 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bfzd4" event={"ID":"17678ad4-8645-4f4f-a752-c1f92d4610f6","Type":"ContainerDied","Data":"0334530d083d73b22f4e9d92cb0b24a24960778f248c46a13d6ec10e0aaf1a38"} Dec 05 17:46:22 crc kubenswrapper[4961]: I1205 17:46:22.312492 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bfzd4" event={"ID":"17678ad4-8645-4f4f-a752-c1f92d4610f6","Type":"ContainerStarted","Data":"4382a27888914f47a1fddb6cc760be3953a79c399a984cb3a17c020b85e0c5f7"} Dec 05 17:46:22 crc kubenswrapper[4961]: I1205 17:46:22.312896 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bfzd4" event={"ID":"17678ad4-8645-4f4f-a752-c1f92d4610f6","Type":"ContainerStarted","Data":"ce23573e76db51b1e65ecc2b664a7246506c0a521fd74c138df86497cc92a64d"} Dec 05 17:46:22 crc kubenswrapper[4961]: I1205 17:46:22.312916 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bfzd4" event={"ID":"17678ad4-8645-4f4f-a752-c1f92d4610f6","Type":"ContainerStarted","Data":"44334a556b9d4118fe2c63c0ebb4656256d4ec36327636844be1f037308424de"} Dec 05 17:46:22 crc kubenswrapper[4961]: I1205 17:46:22.312927 4961 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="metallb-system/frr-k8s-bfzd4" event={"ID":"17678ad4-8645-4f4f-a752-c1f92d4610f6","Type":"ContainerStarted","Data":"13bf253868dac72b0ce5be7046fee950df05ca0ce9008928edd5ebfdd2b7165a"} Dec 05 17:46:22 crc kubenswrapper[4961]: I1205 17:46:22.312938 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bfzd4" event={"ID":"17678ad4-8645-4f4f-a752-c1f92d4610f6","Type":"ContainerStarted","Data":"dff60277af232755f233f751036be4b7ee3fbe194242772d25319a271fb1030f"} Dec 05 17:46:22 crc kubenswrapper[4961]: I1205 17:46:22.312952 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bfzd4" event={"ID":"17678ad4-8645-4f4f-a752-c1f92d4610f6","Type":"ContainerStarted","Data":"dcd89286f93ddd35079f91af249f8cd968644f1a06734f8cb0bccfd0753cb8b5"} Dec 05 17:46:22 crc kubenswrapper[4961]: I1205 17:46:22.313003 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-bfzd4" Dec 05 17:46:22 crc kubenswrapper[4961]: I1205 17:46:22.336464 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-bfzd4" podStartSLOduration=5.754434915 podStartE2EDuration="15.336445625s" podCreationTimestamp="2025-12-05 17:46:07 +0000 UTC" firstStartedPulling="2025-12-05 17:46:08.7302573 +0000 UTC m=+774.791407773" lastFinishedPulling="2025-12-05 17:46:18.31226801 +0000 UTC m=+784.373418483" observedRunningTime="2025-12-05 17:46:22.331116953 +0000 UTC m=+788.392267446" watchObservedRunningTime="2025-12-05 17:46:22.336445625 +0000 UTC m=+788.397596098" Dec 05 17:46:23 crc kubenswrapper[4961]: I1205 17:46:23.587209 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-bfzd4" Dec 05 17:46:23 crc kubenswrapper[4961]: I1205 17:46:23.660962 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-bfzd4" Dec 05 17:46:26 crc kubenswrapper[4961]: I1205 17:46:26.038059 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-kkcbp"] Dec 05 17:46:26 crc kubenswrapper[4961]: I1205 17:46:26.039420 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-kkcbp" Dec 05 17:46:26 crc kubenswrapper[4961]: I1205 17:46:26.042250 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Dec 05 17:46:26 crc kubenswrapper[4961]: I1205 17:46:26.042483 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-index-dockercfg-cff8l" Dec 05 17:46:26 crc kubenswrapper[4961]: I1205 17:46:26.042716 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Dec 05 17:46:26 crc kubenswrapper[4961]: I1205 17:46:26.059612 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-kkcbp"] Dec 05 17:46:26 crc kubenswrapper[4961]: I1205 17:46:26.108947 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ljrqv\" (UniqueName: \"kubernetes.io/projected/3d373e6f-bb87-411d-a17d-b908bcdb093f-kube-api-access-ljrqv\") pod \"openstack-operator-index-kkcbp\" (UID: \"3d373e6f-bb87-411d-a17d-b908bcdb093f\") " pod="openstack-operators/openstack-operator-index-kkcbp" Dec 05 17:46:26 crc kubenswrapper[4961]: I1205 17:46:26.209997 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ljrqv\" (UniqueName: \"kubernetes.io/projected/3d373e6f-bb87-411d-a17d-b908bcdb093f-kube-api-access-ljrqv\") pod \"openstack-operator-index-kkcbp\" (UID: \"3d373e6f-bb87-411d-a17d-b908bcdb093f\") " pod="openstack-operators/openstack-operator-index-kkcbp" Dec 05 17:46:26 crc kubenswrapper[4961]: I1205 17:46:26.232481 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ljrqv\" (UniqueName: \"kubernetes.io/projected/3d373e6f-bb87-411d-a17d-b908bcdb093f-kube-api-access-ljrqv\") pod \"openstack-operator-index-kkcbp\" (UID: \"3d373e6f-bb87-411d-a17d-b908bcdb093f\") " pod="openstack-operators/openstack-operator-index-kkcbp" Dec 05 17:46:26 crc kubenswrapper[4961]: I1205 17:46:26.359504 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-kkcbp" Dec 05 17:46:26 crc kubenswrapper[4961]: I1205 17:46:26.807804 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-kkcbp"] Dec 05 17:46:27 crc kubenswrapper[4961]: I1205 17:46:27.246227 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:46:27 crc kubenswrapper[4961]: I1205 17:46:27.246293 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:46:27 crc kubenswrapper[4961]: I1205 17:46:27.340586 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-kkcbp" event={"ID":"3d373e6f-bb87-411d-a17d-b908bcdb093f","Type":"ContainerStarted","Data":"1656749dde1bc3f4423cd2616a6130cd37188e4b62dd7fa97db2011148d2e0af"} Dec 05 17:46:28 crc kubenswrapper[4961]: I1205 17:46:28.634930 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7fcb986d4-gsnw2" Dec 05 17:46:31 crc kubenswrapper[4961]: I1205 17:46:31.227920 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-kkcbp"] Dec 05 17:46:31 crc kubenswrapper[4961]: I1205 17:46:31.830162 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-index-vblkt"] Dec 05 17:46:31 crc kubenswrapper[4961]: I1205 17:46:31.830883 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-vblkt" Dec 05 17:46:31 crc kubenswrapper[4961]: I1205 17:46:31.858207 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-vblkt"] Dec 05 17:46:31 crc kubenswrapper[4961]: I1205 17:46:31.893708 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mmxgq\" (UniqueName: \"kubernetes.io/projected/2fdc52dc-265b-42d1-8b82-4e2c0fbccb3b-kube-api-access-mmxgq\") pod \"openstack-operator-index-vblkt\" (UID: \"2fdc52dc-265b-42d1-8b82-4e2c0fbccb3b\") " pod="openstack-operators/openstack-operator-index-vblkt" Dec 05 17:46:31 crc kubenswrapper[4961]: I1205 17:46:31.997232 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mmxgq\" (UniqueName: \"kubernetes.io/projected/2fdc52dc-265b-42d1-8b82-4e2c0fbccb3b-kube-api-access-mmxgq\") pod \"openstack-operator-index-vblkt\" (UID: \"2fdc52dc-265b-42d1-8b82-4e2c0fbccb3b\") " pod="openstack-operators/openstack-operator-index-vblkt" Dec 05 17:46:32 crc kubenswrapper[4961]: I1205 17:46:32.023644 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mmxgq\" (UniqueName: \"kubernetes.io/projected/2fdc52dc-265b-42d1-8b82-4e2c0fbccb3b-kube-api-access-mmxgq\") pod \"openstack-operator-index-vblkt\" (UID: \"2fdc52dc-265b-42d1-8b82-4e2c0fbccb3b\") " pod="openstack-operators/openstack-operator-index-vblkt" Dec 05 17:46:32 crc kubenswrapper[4961]: I1205 17:46:32.169006 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-vblkt" Dec 05 17:46:32 crc kubenswrapper[4961]: I1205 17:46:32.876146 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-index-vblkt"] Dec 05 17:46:32 crc kubenswrapper[4961]: W1205 17:46:32.880303 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2fdc52dc_265b_42d1_8b82_4e2c0fbccb3b.slice/crio-01476c1fd017fc1695e8d9a60fbd64edb7a97de89e6fdbd4367329a34bc0e09f WatchSource:0}: Error finding container 01476c1fd017fc1695e8d9a60fbd64edb7a97de89e6fdbd4367329a34bc0e09f: Status 404 returned error can't find the container with id 01476c1fd017fc1695e8d9a60fbd64edb7a97de89e6fdbd4367329a34bc0e09f Dec 05 17:46:33 crc kubenswrapper[4961]: I1205 17:46:33.380793 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-kkcbp" event={"ID":"3d373e6f-bb87-411d-a17d-b908bcdb093f","Type":"ContainerStarted","Data":"5d081544a1333fce8cbf4a1b75e6bae7ca144358de839ab507697089e5e72af7"} Dec 05 17:46:33 crc kubenswrapper[4961]: I1205 17:46:33.381031 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/openstack-operator-index-kkcbp" podUID="3d373e6f-bb87-411d-a17d-b908bcdb093f" containerName="registry-server" containerID="cri-o://5d081544a1333fce8cbf4a1b75e6bae7ca144358de839ab507697089e5e72af7" gracePeriod=2 Dec 05 17:46:33 crc kubenswrapper[4961]: I1205 17:46:33.383216 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-vblkt" event={"ID":"2fdc52dc-265b-42d1-8b82-4e2c0fbccb3b","Type":"ContainerStarted","Data":"d605a6d565ca99b92892d50d93d72bfce04d7d70c63e209051bcee20fb7de029"} Dec 05 17:46:33 crc kubenswrapper[4961]: I1205 17:46:33.383265 4961 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-operators/openstack-operator-index-vblkt" event={"ID":"2fdc52dc-265b-42d1-8b82-4e2c0fbccb3b","Type":"ContainerStarted","Data":"01476c1fd017fc1695e8d9a60fbd64edb7a97de89e6fdbd4367329a34bc0e09f"} Dec 05 17:46:33 crc kubenswrapper[4961]: I1205 17:46:33.399153 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-kkcbp" podStartSLOduration=1.7114853810000001 podStartE2EDuration="7.399134011s" podCreationTimestamp="2025-12-05 17:46:26 +0000 UTC" firstStartedPulling="2025-12-05 17:46:26.816278659 +0000 UTC m=+792.877429132" lastFinishedPulling="2025-12-05 17:46:32.503927289 +0000 UTC m=+798.565077762" observedRunningTime="2025-12-05 17:46:33.398829734 +0000 UTC m=+799.459980217" watchObservedRunningTime="2025-12-05 17:46:33.399134011 +0000 UTC m=+799.460284494" Dec 05 17:46:33 crc kubenswrapper[4961]: I1205 17:46:33.418615 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-index-vblkt" podStartSLOduration=2.168122877 podStartE2EDuration="2.41858375s" podCreationTimestamp="2025-12-05 17:46:31 +0000 UTC" firstStartedPulling="2025-12-05 17:46:32.884038006 +0000 UTC m=+798.945188479" lastFinishedPulling="2025-12-05 17:46:33.134498889 +0000 UTC m=+799.195649352" observedRunningTime="2025-12-05 17:46:33.413188207 +0000 UTC m=+799.474338690" watchObservedRunningTime="2025-12-05 17:46:33.41858375 +0000 UTC m=+799.479734233" Dec 05 17:46:33 crc kubenswrapper[4961]: I1205 17:46:33.774554 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-operator-index-kkcbp" Dec 05 17:46:33 crc kubenswrapper[4961]: I1205 17:46:33.922406 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ljrqv\" (UniqueName: \"kubernetes.io/projected/3d373e6f-bb87-411d-a17d-b908bcdb093f-kube-api-access-ljrqv\") pod \"3d373e6f-bb87-411d-a17d-b908bcdb093f\" (UID: \"3d373e6f-bb87-411d-a17d-b908bcdb093f\") " Dec 05 17:46:33 crc kubenswrapper[4961]: I1205 17:46:33.928950 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d373e6f-bb87-411d-a17d-b908bcdb093f-kube-api-access-ljrqv" (OuterVolumeSpecName: "kube-api-access-ljrqv") pod "3d373e6f-bb87-411d-a17d-b908bcdb093f" (UID: "3d373e6f-bb87-411d-a17d-b908bcdb093f"). InnerVolumeSpecName "kube-api-access-ljrqv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:46:34 crc kubenswrapper[4961]: I1205 17:46:34.025262 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ljrqv\" (UniqueName: \"kubernetes.io/projected/3d373e6f-bb87-411d-a17d-b908bcdb093f-kube-api-access-ljrqv\") on node \"crc\" DevicePath \"\"" Dec 05 17:46:34 crc kubenswrapper[4961]: I1205 17:46:34.390894 4961 generic.go:334] "Generic (PLEG): container finished" podID="3d373e6f-bb87-411d-a17d-b908bcdb093f" containerID="5d081544a1333fce8cbf4a1b75e6bae7ca144358de839ab507697089e5e72af7" exitCode=0 Dec 05 17:46:34 crc kubenswrapper[4961]: I1205 17:46:34.390977 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-index-kkcbp" Dec 05 17:46:34 crc kubenswrapper[4961]: I1205 17:46:34.391009 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-kkcbp" event={"ID":"3d373e6f-bb87-411d-a17d-b908bcdb093f","Type":"ContainerDied","Data":"5d081544a1333fce8cbf4a1b75e6bae7ca144358de839ab507697089e5e72af7"} Dec 05 17:46:34 crc kubenswrapper[4961]: I1205 17:46:34.391129 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-index-kkcbp" event={"ID":"3d373e6f-bb87-411d-a17d-b908bcdb093f","Type":"ContainerDied","Data":"1656749dde1bc3f4423cd2616a6130cd37188e4b62dd7fa97db2011148d2e0af"} Dec 05 17:46:34 crc kubenswrapper[4961]: I1205 17:46:34.391167 4961 scope.go:117] "RemoveContainer" containerID="5d081544a1333fce8cbf4a1b75e6bae7ca144358de839ab507697089e5e72af7" Dec 05 17:46:34 crc kubenswrapper[4961]: I1205 17:46:34.410921 4961 scope.go:117] "RemoveContainer" containerID="5d081544a1333fce8cbf4a1b75e6bae7ca144358de839ab507697089e5e72af7" Dec 05 17:46:34 crc kubenswrapper[4961]: E1205 17:46:34.411574 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d081544a1333fce8cbf4a1b75e6bae7ca144358de839ab507697089e5e72af7\": container with ID starting with 5d081544a1333fce8cbf4a1b75e6bae7ca144358de839ab507697089e5e72af7 not found: ID does not exist" containerID="5d081544a1333fce8cbf4a1b75e6bae7ca144358de839ab507697089e5e72af7" Dec 05 17:46:34 crc kubenswrapper[4961]: I1205 17:46:34.411623 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d081544a1333fce8cbf4a1b75e6bae7ca144358de839ab507697089e5e72af7"} err="failed to get container status \"5d081544a1333fce8cbf4a1b75e6bae7ca144358de839ab507697089e5e72af7\": rpc error: code = NotFound desc = could not find container \"5d081544a1333fce8cbf4a1b75e6bae7ca144358de839ab507697089e5e72af7\": container with ID starting with 5d081544a1333fce8cbf4a1b75e6bae7ca144358de839ab507697089e5e72af7 not found: ID does not exist" Dec 05 17:46:34 crc kubenswrapper[4961]: I1205 17:46:34.423808 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/openstack-operator-index-kkcbp"] Dec 05 17:46:34 crc kubenswrapper[4961]: I1205 17:46:34.429920 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/openstack-operator-index-kkcbp"] Dec 05 17:46:34 crc kubenswrapper[4961]: I1205 17:46:34.869446 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d373e6f-bb87-411d-a17d-b908bcdb093f" path="/var/lib/kubelet/pods/3d373e6f-bb87-411d-a17d-b908bcdb093f/volumes" Dec 05 17:46:38 crc kubenswrapper[4961]: I1205 17:46:38.589557 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-bfzd4" Dec 05 17:46:42 crc kubenswrapper[4961]: I1205 17:46:42.169476 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-index-vblkt" Dec 05 17:46:42 crc kubenswrapper[4961]: I1205 17:46:42.169948 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/openstack-operator-index-vblkt" Dec 05 17:46:42 crc kubenswrapper[4961]: I1205 17:46:42.197630 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/openstack-operator-index-vblkt" Dec 05 17:46:42 crc kubenswrapper[4961]: I1205 
17:46:42.518006 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-index-vblkt" Dec 05 17:46:55 crc kubenswrapper[4961]: I1205 17:46:55.883878 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq"] Dec 05 17:46:55 crc kubenswrapper[4961]: E1205 17:46:55.886058 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d373e6f-bb87-411d-a17d-b908bcdb093f" containerName="registry-server" Dec 05 17:46:55 crc kubenswrapper[4961]: I1205 17:46:55.886076 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d373e6f-bb87-411d-a17d-b908bcdb093f" containerName="registry-server" Dec 05 17:46:55 crc kubenswrapper[4961]: I1205 17:46:55.886194 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d373e6f-bb87-411d-a17d-b908bcdb093f" containerName="registry-server" Dec 05 17:46:55 crc kubenswrapper[4961]: I1205 17:46:55.886980 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq" Dec 05 17:46:55 crc kubenswrapper[4961]: I1205 17:46:55.889308 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-4m7dr" Dec 05 17:46:55 crc kubenswrapper[4961]: I1205 17:46:55.893383 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq"] Dec 05 17:46:55 crc kubenswrapper[4961]: I1205 17:46:55.926391 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/39126237-6470-4ff9-9dea-d2a7c88a2540-bundle\") pod \"2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq\" (UID: \"39126237-6470-4ff9-9dea-d2a7c88a2540\") " pod="openstack-operators/2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq" Dec 05 17:46:55 crc kubenswrapper[4961]: I1205 17:46:55.926443 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mxfd8\" (UniqueName: \"kubernetes.io/projected/39126237-6470-4ff9-9dea-d2a7c88a2540-kube-api-access-mxfd8\") pod \"2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq\" (UID: \"39126237-6470-4ff9-9dea-d2a7c88a2540\") " pod="openstack-operators/2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq" Dec 05 17:46:55 crc kubenswrapper[4961]: I1205 17:46:55.926501 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/39126237-6470-4ff9-9dea-d2a7c88a2540-util\") pod \"2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq\" (UID: \"39126237-6470-4ff9-9dea-d2a7c88a2540\") " pod="openstack-operators/2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq" Dec 05 17:46:56 crc kubenswrapper[4961]: I1205 17:46:56.028141 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mxfd8\" (UniqueName: \"kubernetes.io/projected/39126237-6470-4ff9-9dea-d2a7c88a2540-kube-api-access-mxfd8\") pod \"2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq\" (UID: \"39126237-6470-4ff9-9dea-d2a7c88a2540\") " pod="openstack-operators/2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq" Dec 05 17:46:56 crc kubenswrapper[4961]: I1205 
17:46:56.028231 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/39126237-6470-4ff9-9dea-d2a7c88a2540-util\") pod \"2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq\" (UID: \"39126237-6470-4ff9-9dea-d2a7c88a2540\") " pod="openstack-operators/2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq" Dec 05 17:46:56 crc kubenswrapper[4961]: I1205 17:46:56.028308 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/39126237-6470-4ff9-9dea-d2a7c88a2540-bundle\") pod \"2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq\" (UID: \"39126237-6470-4ff9-9dea-d2a7c88a2540\") " pod="openstack-operators/2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq" Dec 05 17:46:56 crc kubenswrapper[4961]: I1205 17:46:56.028866 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/39126237-6470-4ff9-9dea-d2a7c88a2540-bundle\") pod \"2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq\" (UID: \"39126237-6470-4ff9-9dea-d2a7c88a2540\") " pod="openstack-operators/2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq" Dec 05 17:46:56 crc kubenswrapper[4961]: I1205 17:46:56.028973 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/39126237-6470-4ff9-9dea-d2a7c88a2540-util\") pod \"2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq\" (UID: \"39126237-6470-4ff9-9dea-d2a7c88a2540\") " pod="openstack-operators/2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq" Dec 05 17:46:56 crc kubenswrapper[4961]: I1205 17:46:56.045943 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mxfd8\" (UniqueName: \"kubernetes.io/projected/39126237-6470-4ff9-9dea-d2a7c88a2540-kube-api-access-mxfd8\") pod \"2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq\" (UID: \"39126237-6470-4ff9-9dea-d2a7c88a2540\") " pod="openstack-operators/2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq" Dec 05 17:46:56 crc kubenswrapper[4961]: I1205 17:46:56.210495 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq" Dec 05 17:46:56 crc kubenswrapper[4961]: I1205 17:46:56.450066 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq"] Dec 05 17:46:56 crc kubenswrapper[4961]: I1205 17:46:56.578197 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq" event={"ID":"39126237-6470-4ff9-9dea-d2a7c88a2540","Type":"ContainerStarted","Data":"868bf120e10149cffbf8b8c7146deffdc81af4d1cd01e23c1e4501f8abe88b7a"} Dec 05 17:46:57 crc kubenswrapper[4961]: I1205 17:46:57.245317 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:46:57 crc kubenswrapper[4961]: I1205 17:46:57.245741 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:46:57 crc kubenswrapper[4961]: I1205 17:46:57.588465 4961 generic.go:334] "Generic (PLEG): container finished" podID="39126237-6470-4ff9-9dea-d2a7c88a2540" containerID="ac463b3848b6d8c7b941e2f914457c56bf220aaacdc906b76dcb987b0e0d5c4d" exitCode=0 Dec 05 17:46:57 crc kubenswrapper[4961]: I1205 17:46:57.588530 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq" event={"ID":"39126237-6470-4ff9-9dea-d2a7c88a2540","Type":"ContainerDied","Data":"ac463b3848b6d8c7b941e2f914457c56bf220aaacdc906b76dcb987b0e0d5c4d"} Dec 05 17:46:59 crc kubenswrapper[4961]: I1205 17:46:59.603509 4961 generic.go:334] "Generic (PLEG): container finished" podID="39126237-6470-4ff9-9dea-d2a7c88a2540" containerID="5a445b755f7dff87d215ce63a4ceaf7100eb0943c7300180ec093a16c4feb3b2" exitCode=0 Dec 05 17:46:59 crc kubenswrapper[4961]: I1205 17:46:59.603570 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq" event={"ID":"39126237-6470-4ff9-9dea-d2a7c88a2540","Type":"ContainerDied","Data":"5a445b755f7dff87d215ce63a4ceaf7100eb0943c7300180ec093a16c4feb3b2"} Dec 05 17:47:02 crc kubenswrapper[4961]: I1205 17:47:02.626550 4961 generic.go:334] "Generic (PLEG): container finished" podID="39126237-6470-4ff9-9dea-d2a7c88a2540" containerID="845135f3403b504343e1514e54245ed8e3ba18178b48b99289e3e97b7aba4fd5" exitCode=0 Dec 05 17:47:02 crc kubenswrapper[4961]: I1205 17:47:02.626694 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq" event={"ID":"39126237-6470-4ff9-9dea-d2a7c88a2540","Type":"ContainerDied","Data":"845135f3403b504343e1514e54245ed8e3ba18178b48b99289e3e97b7aba4fd5"} Dec 05 17:47:03 crc kubenswrapper[4961]: I1205 17:47:03.882040 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq" Dec 05 17:47:04 crc kubenswrapper[4961]: I1205 17:47:04.029192 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/39126237-6470-4ff9-9dea-d2a7c88a2540-bundle\") pod \"39126237-6470-4ff9-9dea-d2a7c88a2540\" (UID: \"39126237-6470-4ff9-9dea-d2a7c88a2540\") " Dec 05 17:47:04 crc kubenswrapper[4961]: I1205 17:47:04.029275 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mxfd8\" (UniqueName: \"kubernetes.io/projected/39126237-6470-4ff9-9dea-d2a7c88a2540-kube-api-access-mxfd8\") pod \"39126237-6470-4ff9-9dea-d2a7c88a2540\" (UID: \"39126237-6470-4ff9-9dea-d2a7c88a2540\") " Dec 05 17:47:04 crc kubenswrapper[4961]: I1205 17:47:04.029391 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/39126237-6470-4ff9-9dea-d2a7c88a2540-util\") pod \"39126237-6470-4ff9-9dea-d2a7c88a2540\" (UID: \"39126237-6470-4ff9-9dea-d2a7c88a2540\") " Dec 05 17:47:04 crc kubenswrapper[4961]: I1205 17:47:04.030717 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39126237-6470-4ff9-9dea-d2a7c88a2540-bundle" (OuterVolumeSpecName: "bundle") pod "39126237-6470-4ff9-9dea-d2a7c88a2540" (UID: "39126237-6470-4ff9-9dea-d2a7c88a2540"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:47:04 crc kubenswrapper[4961]: I1205 17:47:04.034695 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39126237-6470-4ff9-9dea-d2a7c88a2540-kube-api-access-mxfd8" (OuterVolumeSpecName: "kube-api-access-mxfd8") pod "39126237-6470-4ff9-9dea-d2a7c88a2540" (UID: "39126237-6470-4ff9-9dea-d2a7c88a2540"). InnerVolumeSpecName "kube-api-access-mxfd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:47:04 crc kubenswrapper[4961]: I1205 17:47:04.039700 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/39126237-6470-4ff9-9dea-d2a7c88a2540-util" (OuterVolumeSpecName: "util") pod "39126237-6470-4ff9-9dea-d2a7c88a2540" (UID: "39126237-6470-4ff9-9dea-d2a7c88a2540"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:47:04 crc kubenswrapper[4961]: I1205 17:47:04.130702 4961 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/39126237-6470-4ff9-9dea-d2a7c88a2540-util\") on node \"crc\" DevicePath \"\"" Dec 05 17:47:04 crc kubenswrapper[4961]: I1205 17:47:04.130733 4961 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/39126237-6470-4ff9-9dea-d2a7c88a2540-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:47:04 crc kubenswrapper[4961]: I1205 17:47:04.130747 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mxfd8\" (UniqueName: \"kubernetes.io/projected/39126237-6470-4ff9-9dea-d2a7c88a2540-kube-api-access-mxfd8\") on node \"crc\" DevicePath \"\"" Dec 05 17:47:04 crc kubenswrapper[4961]: I1205 17:47:04.643293 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq" event={"ID":"39126237-6470-4ff9-9dea-d2a7c88a2540","Type":"ContainerDied","Data":"868bf120e10149cffbf8b8c7146deffdc81af4d1cd01e23c1e4501f8abe88b7a"} Dec 05 17:47:04 crc kubenswrapper[4961]: I1205 17:47:04.643333 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="868bf120e10149cffbf8b8c7146deffdc81af4d1cd01e23c1e4501f8abe88b7a" Dec 05 17:47:04 crc kubenswrapper[4961]: I1205 17:47:04.643379 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq" Dec 05 17:47:07 crc kubenswrapper[4961]: I1205 17:47:07.228594 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-operator-d885c5b7-b9mhp"] Dec 05 17:47:07 crc kubenswrapper[4961]: E1205 17:47:07.229236 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39126237-6470-4ff9-9dea-d2a7c88a2540" containerName="util" Dec 05 17:47:07 crc kubenswrapper[4961]: I1205 17:47:07.229252 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="39126237-6470-4ff9-9dea-d2a7c88a2540" containerName="util" Dec 05 17:47:07 crc kubenswrapper[4961]: E1205 17:47:07.229270 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39126237-6470-4ff9-9dea-d2a7c88a2540" containerName="pull" Dec 05 17:47:07 crc kubenswrapper[4961]: I1205 17:47:07.229278 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="39126237-6470-4ff9-9dea-d2a7c88a2540" containerName="pull" Dec 05 17:47:07 crc kubenswrapper[4961]: E1205 17:47:07.229310 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39126237-6470-4ff9-9dea-d2a7c88a2540" containerName="extract" Dec 05 17:47:07 crc kubenswrapper[4961]: I1205 17:47:07.229320 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="39126237-6470-4ff9-9dea-d2a7c88a2540" containerName="extract" Dec 05 17:47:07 crc kubenswrapper[4961]: I1205 17:47:07.229460 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="39126237-6470-4ff9-9dea-d2a7c88a2540" containerName="extract" Dec 05 17:47:07 crc kubenswrapper[4961]: I1205 17:47:07.229982 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-d885c5b7-b9mhp" Dec 05 17:47:07 crc kubenswrapper[4961]: I1205 17:47:07.232555 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-operator-dockercfg-hhkc9" Dec 05 17:47:07 crc kubenswrapper[4961]: I1205 17:47:07.248421 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-d885c5b7-b9mhp"] Dec 05 17:47:07 crc kubenswrapper[4961]: I1205 17:47:07.373410 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gxg5j\" (UniqueName: \"kubernetes.io/projected/8359ac81-7e2e-4a86-9052-2cba7e945d40-kube-api-access-gxg5j\") pod \"openstack-operator-controller-operator-d885c5b7-b9mhp\" (UID: \"8359ac81-7e2e-4a86-9052-2cba7e945d40\") " pod="openstack-operators/openstack-operator-controller-operator-d885c5b7-b9mhp" Dec 05 17:47:07 crc kubenswrapper[4961]: I1205 17:47:07.474320 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gxg5j\" (UniqueName: \"kubernetes.io/projected/8359ac81-7e2e-4a86-9052-2cba7e945d40-kube-api-access-gxg5j\") pod \"openstack-operator-controller-operator-d885c5b7-b9mhp\" (UID: \"8359ac81-7e2e-4a86-9052-2cba7e945d40\") " pod="openstack-operators/openstack-operator-controller-operator-d885c5b7-b9mhp" Dec 05 17:47:07 crc kubenswrapper[4961]: I1205 17:47:07.500069 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gxg5j\" (UniqueName: \"kubernetes.io/projected/8359ac81-7e2e-4a86-9052-2cba7e945d40-kube-api-access-gxg5j\") pod \"openstack-operator-controller-operator-d885c5b7-b9mhp\" (UID: \"8359ac81-7e2e-4a86-9052-2cba7e945d40\") " pod="openstack-operators/openstack-operator-controller-operator-d885c5b7-b9mhp" Dec 05 17:47:07 crc kubenswrapper[4961]: I1205 17:47:07.549264 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-operator-d885c5b7-b9mhp" Dec 05 17:47:07 crc kubenswrapper[4961]: I1205 17:47:07.792706 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-operator-d885c5b7-b9mhp"] Dec 05 17:47:08 crc kubenswrapper[4961]: I1205 17:47:08.667750 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-d885c5b7-b9mhp" event={"ID":"8359ac81-7e2e-4a86-9052-2cba7e945d40","Type":"ContainerStarted","Data":"2e826fc6eca53adaffdd51da0811abc2ecc122492564bcb807fbb623a5983e60"} Dec 05 17:47:14 crc kubenswrapper[4961]: I1205 17:47:14.709270 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-operator-d885c5b7-b9mhp" event={"ID":"8359ac81-7e2e-4a86-9052-2cba7e945d40","Type":"ContainerStarted","Data":"09725b26311f0984f5adfcfc7b97fc5f9d3b7867d37b2b7d4b3d77c94f4c99f5"} Dec 05 17:47:14 crc kubenswrapper[4961]: I1205 17:47:14.709729 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-operator-d885c5b7-b9mhp" Dec 05 17:47:14 crc kubenswrapper[4961]: I1205 17:47:14.736529 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-operator-d885c5b7-b9mhp" podStartSLOduration=1.646172989 podStartE2EDuration="7.736510763s" podCreationTimestamp="2025-12-05 17:47:07 +0000 UTC" firstStartedPulling="2025-12-05 17:47:07.804060174 +0000 UTC m=+833.865210647" lastFinishedPulling="2025-12-05 17:47:13.894397958 +0000 UTC m=+839.955548421" observedRunningTime="2025-12-05 17:47:14.734591645 +0000 UTC m=+840.795742118" watchObservedRunningTime="2025-12-05 17:47:14.736510763 +0000 UTC m=+840.797661246" Dec 05 17:47:27 crc kubenswrapper[4961]: I1205 17:47:27.245937 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:47:27 crc kubenswrapper[4961]: I1205 17:47:27.246742 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:47:27 crc kubenswrapper[4961]: I1205 17:47:27.246803 4961 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" Dec 05 17:47:27 crc kubenswrapper[4961]: I1205 17:47:27.247531 4961 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8298295ec17dfc63ef58bd3072d80116c1dcc72b110ce7d9f28c0734b811e20a"} pod="openshift-machine-config-operator/machine-config-daemon-4vc27" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 17:47:27 crc kubenswrapper[4961]: I1205 17:47:27.247595 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" 
containerID="cri-o://8298295ec17dfc63ef58bd3072d80116c1dcc72b110ce7d9f28c0734b811e20a" gracePeriod=600 Dec 05 17:47:27 crc kubenswrapper[4961]: I1205 17:47:27.552423 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-operator-d885c5b7-b9mhp" Dec 05 17:47:27 crc kubenswrapper[4961]: I1205 17:47:27.802078 4961 generic.go:334] "Generic (PLEG): container finished" podID="c048c267-061b-479b-9d63-b3aee093d9f6" containerID="8298295ec17dfc63ef58bd3072d80116c1dcc72b110ce7d9f28c0734b811e20a" exitCode=0 Dec 05 17:47:27 crc kubenswrapper[4961]: I1205 17:47:27.802141 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerDied","Data":"8298295ec17dfc63ef58bd3072d80116c1dcc72b110ce7d9f28c0734b811e20a"} Dec 05 17:47:27 crc kubenswrapper[4961]: I1205 17:47:27.802612 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerStarted","Data":"7d9ed5247bcd400783c6d7e2c5bad6038c840849b5210eed14fdd0422b90e593"} Dec 05 17:47:27 crc kubenswrapper[4961]: I1205 17:47:27.802637 4961 scope.go:117] "RemoveContainer" containerID="186f0ffa4d8d4244121fc3f97d5899df054ba290ecdaef527b5b09c64012516b" Dec 05 17:47:39 crc kubenswrapper[4961]: I1205 17:47:39.697750 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pqstj"] Dec 05 17:47:39 crc kubenswrapper[4961]: I1205 17:47:39.699439 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pqstj" Dec 05 17:47:39 crc kubenswrapper[4961]: I1205 17:47:39.708839 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqstj"] Dec 05 17:47:39 crc kubenswrapper[4961]: I1205 17:47:39.743899 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ebb57ac-af33-476b-afed-3f23988fe0da-catalog-content\") pod \"redhat-marketplace-pqstj\" (UID: \"4ebb57ac-af33-476b-afed-3f23988fe0da\") " pod="openshift-marketplace/redhat-marketplace-pqstj" Dec 05 17:47:39 crc kubenswrapper[4961]: I1205 17:47:39.743959 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-blk5n\" (UniqueName: \"kubernetes.io/projected/4ebb57ac-af33-476b-afed-3f23988fe0da-kube-api-access-blk5n\") pod \"redhat-marketplace-pqstj\" (UID: \"4ebb57ac-af33-476b-afed-3f23988fe0da\") " pod="openshift-marketplace/redhat-marketplace-pqstj" Dec 05 17:47:39 crc kubenswrapper[4961]: I1205 17:47:39.744022 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ebb57ac-af33-476b-afed-3f23988fe0da-utilities\") pod \"redhat-marketplace-pqstj\" (UID: \"4ebb57ac-af33-476b-afed-3f23988fe0da\") " pod="openshift-marketplace/redhat-marketplace-pqstj" Dec 05 17:47:39 crc kubenswrapper[4961]: I1205 17:47:39.845125 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ebb57ac-af33-476b-afed-3f23988fe0da-catalog-content\") pod \"redhat-marketplace-pqstj\" (UID: \"4ebb57ac-af33-476b-afed-3f23988fe0da\") " 
pod="openshift-marketplace/redhat-marketplace-pqstj" Dec 05 17:47:39 crc kubenswrapper[4961]: I1205 17:47:39.845179 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-blk5n\" (UniqueName: \"kubernetes.io/projected/4ebb57ac-af33-476b-afed-3f23988fe0da-kube-api-access-blk5n\") pod \"redhat-marketplace-pqstj\" (UID: \"4ebb57ac-af33-476b-afed-3f23988fe0da\") " pod="openshift-marketplace/redhat-marketplace-pqstj" Dec 05 17:47:39 crc kubenswrapper[4961]: I1205 17:47:39.845216 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ebb57ac-af33-476b-afed-3f23988fe0da-utilities\") pod \"redhat-marketplace-pqstj\" (UID: \"4ebb57ac-af33-476b-afed-3f23988fe0da\") " pod="openshift-marketplace/redhat-marketplace-pqstj" Dec 05 17:47:39 crc kubenswrapper[4961]: I1205 17:47:39.846002 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ebb57ac-af33-476b-afed-3f23988fe0da-utilities\") pod \"redhat-marketplace-pqstj\" (UID: \"4ebb57ac-af33-476b-afed-3f23988fe0da\") " pod="openshift-marketplace/redhat-marketplace-pqstj" Dec 05 17:47:39 crc kubenswrapper[4961]: I1205 17:47:39.846104 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ebb57ac-af33-476b-afed-3f23988fe0da-catalog-content\") pod \"redhat-marketplace-pqstj\" (UID: \"4ebb57ac-af33-476b-afed-3f23988fe0da\") " pod="openshift-marketplace/redhat-marketplace-pqstj" Dec 05 17:47:39 crc kubenswrapper[4961]: I1205 17:47:39.863958 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-blk5n\" (UniqueName: \"kubernetes.io/projected/4ebb57ac-af33-476b-afed-3f23988fe0da-kube-api-access-blk5n\") pod \"redhat-marketplace-pqstj\" (UID: \"4ebb57ac-af33-476b-afed-3f23988fe0da\") " pod="openshift-marketplace/redhat-marketplace-pqstj" Dec 05 17:47:40 crc kubenswrapper[4961]: I1205 17:47:40.018959 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pqstj" Dec 05 17:47:40 crc kubenswrapper[4961]: I1205 17:47:40.482254 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqstj"] Dec 05 17:47:40 crc kubenswrapper[4961]: I1205 17:47:40.884517 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqstj" event={"ID":"4ebb57ac-af33-476b-afed-3f23988fe0da","Type":"ContainerStarted","Data":"3cdcd5c653e35e22cc04ea19278aaee1388b9b4851ea7c3879b7aa04a15ae958"} Dec 05 17:47:41 crc kubenswrapper[4961]: I1205 17:47:41.891080 4961 generic.go:334] "Generic (PLEG): container finished" podID="4ebb57ac-af33-476b-afed-3f23988fe0da" containerID="b399b6b4d64b0dedeb462569c11e509de2a5b909472f44e857ef2cac58b89f94" exitCode=0 Dec 05 17:47:41 crc kubenswrapper[4961]: I1205 17:47:41.891143 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqstj" event={"ID":"4ebb57ac-af33-476b-afed-3f23988fe0da","Type":"ContainerDied","Data":"b399b6b4d64b0dedeb462569c11e509de2a5b909472f44e857ef2cac58b89f94"} Dec 05 17:47:42 crc kubenswrapper[4961]: I1205 17:47:42.901181 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqstj" event={"ID":"4ebb57ac-af33-476b-afed-3f23988fe0da","Type":"ContainerStarted","Data":"7b413fd9c9c5c4ed59e9804ba7d4193bd79f474913ef128184ea0b52fc6dd644"} Dec 05 17:47:43 crc kubenswrapper[4961]: I1205 17:47:43.909277 4961 generic.go:334] "Generic (PLEG): container finished" podID="4ebb57ac-af33-476b-afed-3f23988fe0da" containerID="7b413fd9c9c5c4ed59e9804ba7d4193bd79f474913ef128184ea0b52fc6dd644" exitCode=0 Dec 05 17:47:43 crc kubenswrapper[4961]: I1205 17:47:43.909376 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqstj" event={"ID":"4ebb57ac-af33-476b-afed-3f23988fe0da","Type":"ContainerDied","Data":"7b413fd9c9c5c4ed59e9804ba7d4193bd79f474913ef128184ea0b52fc6dd644"} Dec 05 17:47:44 crc kubenswrapper[4961]: I1205 17:47:44.919072 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqstj" event={"ID":"4ebb57ac-af33-476b-afed-3f23988fe0da","Type":"ContainerStarted","Data":"4e36fa3a09430bf0378f66d0927d68677980080cb07517a431051e27155c835d"} Dec 05 17:47:44 crc kubenswrapper[4961]: I1205 17:47:44.942080 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pqstj" podStartSLOduration=3.354466148 podStartE2EDuration="5.942057428s" podCreationTimestamp="2025-12-05 17:47:39 +0000 UTC" firstStartedPulling="2025-12-05 17:47:41.892742609 +0000 UTC m=+867.953893082" lastFinishedPulling="2025-12-05 17:47:44.480333889 +0000 UTC m=+870.541484362" observedRunningTime="2025-12-05 17:47:44.938651115 +0000 UTC m=+870.999801598" watchObservedRunningTime="2025-12-05 17:47:44.942057428 +0000 UTC m=+871.003207911" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.298857 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-cnqj9"] Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.300552 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-cnqj9" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.303541 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"barbican-operator-controller-manager-dockercfg-wtwwc" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.308031 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-nw6x8"] Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.310604 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-nw6x8" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.313941 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"cinder-operator-controller-manager-dockercfg-dhjbl" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.317214 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-nw6x8"] Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.323854 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-cnqj9"] Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.331868 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-qnmsr"] Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.333075 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-qnmsr" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.339157 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"designate-operator-controller-manager-dockercfg-lr8rb" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.349859 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-dlwzz"] Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.350936 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-dlwzz" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.353630 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"glance-operator-controller-manager-dockercfg-bhdcz" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.356277 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-qnmsr"] Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.379538 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-dlwzz"] Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.395626 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-95v4g"] Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.396837 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-95v4g" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.401731 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"heat-operator-controller-manager-dockercfg-np2bh" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.412471 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gd5nk"] Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.413544 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gd5nk" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.414428 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-95v4g"] Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.418677 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-84b9cfc694-lml7r"] Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.419646 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-84b9cfc694-lml7r" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.425673 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-webhook-server-cert" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.425845 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-x98hz" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.426022 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"horizon-operator-controller-manager-dockercfg-wtxpz" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.435099 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5cjz\" (UniqueName: \"kubernetes.io/projected/406dca34-428b-493b-b564-511542c2bad6-kube-api-access-m5cjz\") pod \"cinder-operator-controller-manager-859b6ccc6-nw6x8\" (UID: \"406dca34-428b-493b-b564-511542c2bad6\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-nw6x8" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.435180 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldlwp\" (UniqueName: \"kubernetes.io/projected/6a4cdfbf-8697-4f8f-9d07-b5aaa5e05991-kube-api-access-ldlwp\") pod \"designate-operator-controller-manager-78b4bc895b-qnmsr\" (UID: \"6a4cdfbf-8697-4f8f-9d07-b5aaa5e05991\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-qnmsr" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.435230 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-prf5q\" (UniqueName: \"kubernetes.io/projected/705eb884-eb46-4d59-86ee-c2f1587d5df4-kube-api-access-prf5q\") pod \"barbican-operator-controller-manager-7d9dfd778-cnqj9\" (UID: \"705eb884-eb46-4d59-86ee-c2f1587d5df4\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-cnqj9" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.435309 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gd5nk"] Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.470400 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-84b9cfc694-lml7r"] Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.481989 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-zjbvv"] Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.483724 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-zjbvv" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.491638 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ironic-operator-controller-manager-dockercfg-mjksx" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.516817 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-zjbvv"] Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.534667 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-q2p4h"] Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.537171 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-q2p4h" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.541548 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/77f7ec48-3abf-4934-a703-fa3f5edfbd27-cert\") pod \"infra-operator-controller-manager-84b9cfc694-lml7r\" (UID: \"77f7ec48-3abf-4934-a703-fa3f5edfbd27\") " pod="openstack-operators/infra-operator-controller-manager-84b9cfc694-lml7r" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.541857 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hg48b\" (UniqueName: \"kubernetes.io/projected/4d42ce43-3c27-4007-a20b-e0068beb2490-kube-api-access-hg48b\") pod \"heat-operator-controller-manager-5f64f6f8bb-95v4g\" (UID: \"4d42ce43-3c27-4007-a20b-e0068beb2490\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-95v4g" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.541945 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4fndk\" (UniqueName: \"kubernetes.io/projected/af9a8d55-8cff-40e2-9f1b-bbd05c3eea80-kube-api-access-4fndk\") pod \"horizon-operator-controller-manager-68c6d99b8f-gd5nk\" (UID: \"af9a8d55-8cff-40e2-9f1b-bbd05c3eea80\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gd5nk" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.542097 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-prf5q\" (UniqueName: \"kubernetes.io/projected/705eb884-eb46-4d59-86ee-c2f1587d5df4-kube-api-access-prf5q\") pod \"barbican-operator-controller-manager-7d9dfd778-cnqj9\" (UID: \"705eb884-eb46-4d59-86ee-c2f1587d5df4\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-cnqj9" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.542198 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rjsk\" 
(UniqueName: \"kubernetes.io/projected/0b1fd140-fbf5-4d64-950b-b0bdcd07ec54-kube-api-access-9rjsk\") pod \"glance-operator-controller-manager-77987cd8cd-dlwzz\" (UID: \"0b1fd140-fbf5-4d64-950b-b0bdcd07ec54\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-dlwzz" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.542285 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5cjz\" (UniqueName: \"kubernetes.io/projected/406dca34-428b-493b-b564-511542c2bad6-kube-api-access-m5cjz\") pod \"cinder-operator-controller-manager-859b6ccc6-nw6x8\" (UID: \"406dca34-428b-493b-b564-511542c2bad6\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-nw6x8" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.542400 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cpjp\" (UniqueName: \"kubernetes.io/projected/77f7ec48-3abf-4934-a703-fa3f5edfbd27-kube-api-access-8cpjp\") pod \"infra-operator-controller-manager-84b9cfc694-lml7r\" (UID: \"77f7ec48-3abf-4934-a703-fa3f5edfbd27\") " pod="openstack-operators/infra-operator-controller-manager-84b9cfc694-lml7r" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.542496 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldlwp\" (UniqueName: \"kubernetes.io/projected/6a4cdfbf-8697-4f8f-9d07-b5aaa5e05991-kube-api-access-ldlwp\") pod \"designate-operator-controller-manager-78b4bc895b-qnmsr\" (UID: \"6a4cdfbf-8697-4f8f-9d07-b5aaa5e05991\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-qnmsr" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.543592 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-chwr4" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.589831 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5cjz\" (UniqueName: \"kubernetes.io/projected/406dca34-428b-493b-b564-511542c2bad6-kube-api-access-m5cjz\") pod \"cinder-operator-controller-manager-859b6ccc6-nw6x8\" (UID: \"406dca34-428b-493b-b564-511542c2bad6\") " pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-nw6x8" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.590174 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-q2p4h"] Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.595809 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-prf5q\" (UniqueName: \"kubernetes.io/projected/705eb884-eb46-4d59-86ee-c2f1587d5df4-kube-api-access-prf5q\") pod \"barbican-operator-controller-manager-7d9dfd778-cnqj9\" (UID: \"705eb884-eb46-4d59-86ee-c2f1587d5df4\") " pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-cnqj9" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.608588 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldlwp\" (UniqueName: \"kubernetes.io/projected/6a4cdfbf-8697-4f8f-9d07-b5aaa5e05991-kube-api-access-ldlwp\") pod \"designate-operator-controller-manager-78b4bc895b-qnmsr\" (UID: \"6a4cdfbf-8697-4f8f-9d07-b5aaa5e05991\") " pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-qnmsr" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.623073 4961 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-cnqj9" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.641904 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-nw6x8" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.643559 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rjsk\" (UniqueName: \"kubernetes.io/projected/0b1fd140-fbf5-4d64-950b-b0bdcd07ec54-kube-api-access-9rjsk\") pod \"glance-operator-controller-manager-77987cd8cd-dlwzz\" (UID: \"0b1fd140-fbf5-4d64-950b-b0bdcd07ec54\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-dlwzz" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.643607 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjb7c\" (UniqueName: \"kubernetes.io/projected/90df3fec-9bc9-48ca-a432-374c1f7e2002-kube-api-access-hjb7c\") pod \"manila-operator-controller-manager-7c79b5df47-q2p4h\" (UID: \"90df3fec-9bc9-48ca-a432-374c1f7e2002\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-q2p4h" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.643638 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cswtv\" (UniqueName: \"kubernetes.io/projected/2bb86b5f-1ee1-48c0-bcc1-60ca583c1339-kube-api-access-cswtv\") pod \"ironic-operator-controller-manager-6c548fd776-zjbvv\" (UID: \"2bb86b5f-1ee1-48c0-bcc1-60ca583c1339\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-zjbvv" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.643675 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cpjp\" (UniqueName: \"kubernetes.io/projected/77f7ec48-3abf-4934-a703-fa3f5edfbd27-kube-api-access-8cpjp\") pod \"infra-operator-controller-manager-84b9cfc694-lml7r\" (UID: \"77f7ec48-3abf-4934-a703-fa3f5edfbd27\") " pod="openstack-operators/infra-operator-controller-manager-84b9cfc694-lml7r" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.643734 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/77f7ec48-3abf-4934-a703-fa3f5edfbd27-cert\") pod \"infra-operator-controller-manager-84b9cfc694-lml7r\" (UID: \"77f7ec48-3abf-4934-a703-fa3f5edfbd27\") " pod="openstack-operators/infra-operator-controller-manager-84b9cfc694-lml7r" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.643769 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hg48b\" (UniqueName: \"kubernetes.io/projected/4d42ce43-3c27-4007-a20b-e0068beb2490-kube-api-access-hg48b\") pod \"heat-operator-controller-manager-5f64f6f8bb-95v4g\" (UID: \"4d42ce43-3c27-4007-a20b-e0068beb2490\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-95v4g" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.643829 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4fndk\" (UniqueName: \"kubernetes.io/projected/af9a8d55-8cff-40e2-9f1b-bbd05c3eea80-kube-api-access-4fndk\") pod \"horizon-operator-controller-manager-68c6d99b8f-gd5nk\" (UID: \"af9a8d55-8cff-40e2-9f1b-bbd05c3eea80\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gd5nk" Dec 05 17:47:46 crc 
kubenswrapper[4961]: E1205 17:47:46.644529 4961 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 17:47:46 crc kubenswrapper[4961]: E1205 17:47:46.644576 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/77f7ec48-3abf-4934-a703-fa3f5edfbd27-cert podName:77f7ec48-3abf-4934-a703-fa3f5edfbd27 nodeName:}" failed. No retries permitted until 2025-12-05 17:47:47.144557156 +0000 UTC m=+873.205707629 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/77f7ec48-3abf-4934-a703-fa3f5edfbd27-cert") pod "infra-operator-controller-manager-84b9cfc694-lml7r" (UID: "77f7ec48-3abf-4934-a703-fa3f5edfbd27") : secret "infra-operator-webhook-server-cert" not found Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.651912 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-4whjc"] Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.653267 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-4whjc" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.654363 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-qnmsr" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.658502 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-g8vxs" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.659804 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-bnjfr"] Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.666531 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bnjfr" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.669210 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-4whjc"] Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.673341 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-2w8tx" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.695077 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-bnjfr"] Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.716435 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hg48b\" (UniqueName: \"kubernetes.io/projected/4d42ce43-3c27-4007-a20b-e0068beb2490-kube-api-access-hg48b\") pod \"heat-operator-controller-manager-5f64f6f8bb-95v4g\" (UID: \"4d42ce43-3c27-4007-a20b-e0068beb2490\") " pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-95v4g" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.717670 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4fndk\" (UniqueName: \"kubernetes.io/projected/af9a8d55-8cff-40e2-9f1b-bbd05c3eea80-kube-api-access-4fndk\") pod \"horizon-operator-controller-manager-68c6d99b8f-gd5nk\" (UID: \"af9a8d55-8cff-40e2-9f1b-bbd05c3eea80\") " pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gd5nk" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.724314 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rjsk\" (UniqueName: \"kubernetes.io/projected/0b1fd140-fbf5-4d64-950b-b0bdcd07ec54-kube-api-access-9rjsk\") pod \"glance-operator-controller-manager-77987cd8cd-dlwzz\" (UID: \"0b1fd140-fbf5-4d64-950b-b0bdcd07ec54\") " pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-dlwzz" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.736099 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-95v4g" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.737272 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cpjp\" (UniqueName: \"kubernetes.io/projected/77f7ec48-3abf-4934-a703-fa3f5edfbd27-kube-api-access-8cpjp\") pod \"infra-operator-controller-manager-84b9cfc694-lml7r\" (UID: \"77f7ec48-3abf-4934-a703-fa3f5edfbd27\") " pod="openstack-operators/infra-operator-controller-manager-84b9cfc694-lml7r" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.744236 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-pszjn"] Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.897885 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-pszjn" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.898670 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gd5nk" Dec 05 17:47:46 crc kubenswrapper[4961]: I1205 17:47:46.914484 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"neutron-operator-controller-manager-dockercfg-8hhtk" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.046576 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjb7c\" (UniqueName: \"kubernetes.io/projected/90df3fec-9bc9-48ca-a432-374c1f7e2002-kube-api-access-hjb7c\") pod \"manila-operator-controller-manager-7c79b5df47-q2p4h\" (UID: \"90df3fec-9bc9-48ca-a432-374c1f7e2002\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-q2p4h" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.046649 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cswtv\" (UniqueName: \"kubernetes.io/projected/2bb86b5f-1ee1-48c0-bcc1-60ca583c1339-kube-api-access-cswtv\") pod \"ironic-operator-controller-manager-6c548fd776-zjbvv\" (UID: \"2bb86b5f-1ee1-48c0-bcc1-60ca583c1339\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-zjbvv" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.049761 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-dlwzz" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.130423 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cswtv\" (UniqueName: \"kubernetes.io/projected/2bb86b5f-1ee1-48c0-bcc1-60ca583c1339-kube-api-access-cswtv\") pod \"ironic-operator-controller-manager-6c548fd776-zjbvv\" (UID: \"2bb86b5f-1ee1-48c0-bcc1-60ca583c1339\") " pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-zjbvv" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.145104 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjb7c\" (UniqueName: \"kubernetes.io/projected/90df3fec-9bc9-48ca-a432-374c1f7e2002-kube-api-access-hjb7c\") pod \"manila-operator-controller-manager-7c79b5df47-q2p4h\" (UID: \"90df3fec-9bc9-48ca-a432-374c1f7e2002\") " pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-q2p4h" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.153807 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dg6jj\" (UniqueName: \"kubernetes.io/projected/0499fdb9-20d5-445c-9ca0-4492287fbcc0-kube-api-access-dg6jj\") pod \"keystone-operator-controller-manager-7765d96ddf-bnjfr\" (UID: \"0499fdb9-20d5-445c-9ca0-4492287fbcc0\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bnjfr" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.153850 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/77f7ec48-3abf-4934-a703-fa3f5edfbd27-cert\") pod \"infra-operator-controller-manager-84b9cfc694-lml7r\" (UID: \"77f7ec48-3abf-4934-a703-fa3f5edfbd27\") " pod="openstack-operators/infra-operator-controller-manager-84b9cfc694-lml7r" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.153865 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8q6bj\" (UniqueName: \"kubernetes.io/projected/f7757573-1085-4560-880c-3d9b36ce93f7-kube-api-access-8q6bj\") pod 
\"mariadb-operator-controller-manager-56bbcc9d85-4whjc\" (UID: \"f7757573-1085-4560-880c-3d9b36ce93f7\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-4whjc" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.153884 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjnht\" (UniqueName: \"kubernetes.io/projected/f7ff9bd4-8a05-4a50-b38b-701451107b9f-kube-api-access-rjnht\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-pszjn\" (UID: \"f7ff9bd4-8a05-4a50-b38b-701451107b9f\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-pszjn" Dec 05 17:47:47 crc kubenswrapper[4961]: E1205 17:47:47.155020 4961 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 17:47:47 crc kubenswrapper[4961]: E1205 17:47:47.155058 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/77f7ec48-3abf-4934-a703-fa3f5edfbd27-cert podName:77f7ec48-3abf-4934-a703-fa3f5edfbd27 nodeName:}" failed. No retries permitted until 2025-12-05 17:47:48.155044717 +0000 UTC m=+874.216195190 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/77f7ec48-3abf-4934-a703-fa3f5edfbd27-cert") pod "infra-operator-controller-manager-84b9cfc694-lml7r" (UID: "77f7ec48-3abf-4934-a703-fa3f5edfbd27") : secret "infra-operator-webhook-server-cert" not found Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.161321 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-hgv2r"] Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.164159 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-hgv2r"] Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.164184 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-pszjn"] Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.164195 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-2b6j2"] Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.164921 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-jghkw"] Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.165416 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hgv2r" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.165685 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-2b6j2" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.166713 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-2b6j2"] Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.166733 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-jghkw"] Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.166746 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl"] Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.167536 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl"] Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.167552 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c8x5r"] Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.167972 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jghkw" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.168631 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.169029 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-zbcdb"] Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.169103 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"nova-operator-controller-manager-dockercfg-gl45r" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.169838 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c8x5r" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.170790 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"ovn-operator-controller-manager-dockercfg-g4c6h" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.170970 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-webhook-server-cert" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.171815 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-ndxxg" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.171982 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"octavia-operator-controller-manager-dockercfg-j5gwm" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.176141 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-zbcdb"] Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.176171 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c8x5r"] Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.176183 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-dsmtk"] Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.176424 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-zbcdb" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.177122 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-4k7dd"] Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.178878 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-dsmtk" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.179659 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-dsmtk"] Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.179689 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-4k7dd"] Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.179699 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-2jz8d"] Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.180448 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-2jz8d"] Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.180526 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-2jz8d" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.180693 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-4k7dd" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.191968 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"swift-operator-controller-manager-dockercfg-94dh2" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.192176 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"placement-operator-controller-manager-dockercfg-slhwm" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.192320 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"telemetry-operator-controller-manager-dockercfg-4p8sz" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.195099 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"watcher-operator-controller-manager-dockercfg-84kn6" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.195236 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"test-operator-controller-manager-dockercfg-bxh75" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.200092 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-q2p4h" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.256177 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7hvk4\" (UniqueName: \"kubernetes.io/projected/c567a803-253f-4895-a504-caee7ba37c34-kube-api-access-7hvk4\") pod \"watcher-operator-controller-manager-769dc69bc-2jz8d\" (UID: \"c567a803-253f-4895-a504-caee7ba37c34\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-2jz8d" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.256261 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dg6jj\" (UniqueName: \"kubernetes.io/projected/0499fdb9-20d5-445c-9ca0-4492287fbcc0-kube-api-access-dg6jj\") pod \"keystone-operator-controller-manager-7765d96ddf-bnjfr\" (UID: \"0499fdb9-20d5-445c-9ca0-4492287fbcc0\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bnjfr" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.256316 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8q6bj\" (UniqueName: \"kubernetes.io/projected/f7757573-1085-4560-880c-3d9b36ce93f7-kube-api-access-8q6bj\") pod \"mariadb-operator-controller-manager-56bbcc9d85-4whjc\" (UID: \"f7757573-1085-4560-880c-3d9b36ce93f7\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-4whjc" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.256346 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjnht\" (UniqueName: \"kubernetes.io/projected/f7ff9bd4-8a05-4a50-b38b-701451107b9f-kube-api-access-rjnht\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-pszjn\" (UID: \"f7ff9bd4-8a05-4a50-b38b-701451107b9f\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-pszjn" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.293752 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8q6bj\" (UniqueName: \"kubernetes.io/projected/f7757573-1085-4560-880c-3d9b36ce93f7-kube-api-access-8q6bj\") pod \"mariadb-operator-controller-manager-56bbcc9d85-4whjc\" (UID: 
\"f7757573-1085-4560-880c-3d9b36ce93f7\") " pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-4whjc" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.303139 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dg6jj\" (UniqueName: \"kubernetes.io/projected/0499fdb9-20d5-445c-9ca0-4492287fbcc0-kube-api-access-dg6jj\") pod \"keystone-operator-controller-manager-7765d96ddf-bnjfr\" (UID: \"0499fdb9-20d5-445c-9ca0-4492287fbcc0\") " pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bnjfr" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.304206 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjnht\" (UniqueName: \"kubernetes.io/projected/f7ff9bd4-8a05-4a50-b38b-701451107b9f-kube-api-access-rjnht\") pod \"neutron-operator-controller-manager-5fdfd5b6b5-pszjn\" (UID: \"f7ff9bd4-8a05-4a50-b38b-701451107b9f\") " pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-pszjn" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.358363 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7t8qs\" (UniqueName: \"kubernetes.io/projected/ccea0c2d-817c-4895-b8a7-bf852bd12aa9-kube-api-access-7t8qs\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl\" (UID: \"ccea0c2d-817c-4895-b8a7-bf852bd12aa9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.358435 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p7vlw\" (UniqueName: \"kubernetes.io/projected/a69e4847-13bc-4c1e-82a9-546fb11ad38d-kube-api-access-p7vlw\") pod \"octavia-operator-controller-manager-998648c74-2b6j2\" (UID: \"a69e4847-13bc-4c1e-82a9-546fb11ad38d\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-2b6j2" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.358495 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sltp8\" (UniqueName: \"kubernetes.io/projected/e27c6cc8-ef18-421e-8a7b-1b6bb2227724-kube-api-access-sltp8\") pod \"test-operator-controller-manager-5854674fcc-4k7dd\" (UID: \"e27c6cc8-ef18-421e-8a7b-1b6bb2227724\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-4k7dd" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.358533 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ccea0c2d-817c-4895-b8a7-bf852bd12aa9-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl\" (UID: \"ccea0c2d-817c-4895-b8a7-bf852bd12aa9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.358564 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfwxz\" (UniqueName: \"kubernetes.io/projected/0770d71d-11ea-4b63-8a98-31521f395686-kube-api-access-zfwxz\") pod \"telemetry-operator-controller-manager-76cc84c6bb-dsmtk\" (UID: \"0770d71d-11ea-4b63-8a98-31521f395686\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-dsmtk" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.358590 4961 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wp9k8\" (UniqueName: \"kubernetes.io/projected/e661480b-d6fd-4c16-9f03-f519092d05c6-kube-api-access-wp9k8\") pod \"nova-operator-controller-manager-697bc559fc-hgv2r\" (UID: \"e661480b-d6fd-4c16-9f03-f519092d05c6\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hgv2r" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.358644 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpdt5\" (UniqueName: \"kubernetes.io/projected/8fd2cc5d-67e4-4b9a-9d0c-58993491bc08-kube-api-access-xpdt5\") pod \"ovn-operator-controller-manager-b6456fdb6-jghkw\" (UID: \"8fd2cc5d-67e4-4b9a-9d0c-58993491bc08\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jghkw" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.358682 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7hvk4\" (UniqueName: \"kubernetes.io/projected/c567a803-253f-4895-a504-caee7ba37c34-kube-api-access-7hvk4\") pod \"watcher-operator-controller-manager-769dc69bc-2jz8d\" (UID: \"c567a803-253f-4895-a504-caee7ba37c34\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-2jz8d" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.358721 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-btm9j\" (UniqueName: \"kubernetes.io/projected/7ff3fbef-1490-4ce9-b350-03a0a7182b78-kube-api-access-btm9j\") pod \"placement-operator-controller-manager-78f8948974-zbcdb\" (UID: \"7ff3fbef-1490-4ce9-b350-03a0a7182b78\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-zbcdb" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.358748 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9gvvj\" (UniqueName: \"kubernetes.io/projected/767519b7-2067-4fca-a96b-bf9b02e1b273-kube-api-access-9gvvj\") pod \"swift-operator-controller-manager-5f8c65bbfc-c8x5r\" (UID: \"767519b7-2067-4fca-a96b-bf9b02e1b273\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c8x5r" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.360157 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-4whjc" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.372337 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bnjfr" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.380465 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-pszjn" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.382472 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn"] Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.397203 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.413756 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.414151 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-operator-controller-manager-dockercfg-46clf" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.415672 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"metrics-server-cert" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.415814 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-zjbvv" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.420925 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7hvk4\" (UniqueName: \"kubernetes.io/projected/c567a803-253f-4895-a504-caee7ba37c34-kube-api-access-7hvk4\") pod \"watcher-operator-controller-manager-769dc69bc-2jz8d\" (UID: \"c567a803-253f-4895-a504-caee7ba37c34\") " pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-2jz8d" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.422475 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn"] Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.468051 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9gvvj\" (UniqueName: \"kubernetes.io/projected/767519b7-2067-4fca-a96b-bf9b02e1b273-kube-api-access-9gvvj\") pod \"swift-operator-controller-manager-5f8c65bbfc-c8x5r\" (UID: \"767519b7-2067-4fca-a96b-bf9b02e1b273\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c8x5r" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.468157 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7t8qs\" (UniqueName: \"kubernetes.io/projected/ccea0c2d-817c-4895-b8a7-bf852bd12aa9-kube-api-access-7t8qs\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl\" (UID: \"ccea0c2d-817c-4895-b8a7-bf852bd12aa9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.468217 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p7vlw\" (UniqueName: \"kubernetes.io/projected/a69e4847-13bc-4c1e-82a9-546fb11ad38d-kube-api-access-p7vlw\") pod \"octavia-operator-controller-manager-998648c74-2b6j2\" (UID: \"a69e4847-13bc-4c1e-82a9-546fb11ad38d\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-2b6j2" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.468248 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9jnb\" (UniqueName: \"kubernetes.io/projected/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-kube-api-access-l9jnb\") pod \"openstack-operator-controller-manager-777bfdfd44-xwwmn\" (UID: \"619cb5c4-1a5a-4eb8-ad2d-28615e0dc607\") " pod="openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.468288 4961 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-metrics-certs\") pod \"openstack-operator-controller-manager-777bfdfd44-xwwmn\" (UID: \"619cb5c4-1a5a-4eb8-ad2d-28615e0dc607\") " pod="openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.468335 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-webhook-certs\") pod \"openstack-operator-controller-manager-777bfdfd44-xwwmn\" (UID: \"619cb5c4-1a5a-4eb8-ad2d-28615e0dc607\") " pod="openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.468366 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sltp8\" (UniqueName: \"kubernetes.io/projected/e27c6cc8-ef18-421e-8a7b-1b6bb2227724-kube-api-access-sltp8\") pod \"test-operator-controller-manager-5854674fcc-4k7dd\" (UID: \"e27c6cc8-ef18-421e-8a7b-1b6bb2227724\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-4k7dd" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.469841 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ccea0c2d-817c-4895-b8a7-bf852bd12aa9-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl\" (UID: \"ccea0c2d-817c-4895-b8a7-bf852bd12aa9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.469901 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfwxz\" (UniqueName: \"kubernetes.io/projected/0770d71d-11ea-4b63-8a98-31521f395686-kube-api-access-zfwxz\") pod \"telemetry-operator-controller-manager-76cc84c6bb-dsmtk\" (UID: \"0770d71d-11ea-4b63-8a98-31521f395686\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-dsmtk" Dec 05 17:47:47 crc kubenswrapper[4961]: E1205 17:47:47.473229 4961 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 17:47:47 crc kubenswrapper[4961]: E1205 17:47:47.473340 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ccea0c2d-817c-4895-b8a7-bf852bd12aa9-cert podName:ccea0c2d-817c-4895-b8a7-bf852bd12aa9 nodeName:}" failed. No retries permitted until 2025-12-05 17:47:47.973318281 +0000 UTC m=+874.034468754 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ccea0c2d-817c-4895-b8a7-bf852bd12aa9-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl" (UID: "ccea0c2d-817c-4895-b8a7-bf852bd12aa9") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.474227 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wp9k8\" (UniqueName: \"kubernetes.io/projected/e661480b-d6fd-4c16-9f03-f519092d05c6-kube-api-access-wp9k8\") pod \"nova-operator-controller-manager-697bc559fc-hgv2r\" (UID: \"e661480b-d6fd-4c16-9f03-f519092d05c6\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hgv2r" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.474311 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpdt5\" (UniqueName: \"kubernetes.io/projected/8fd2cc5d-67e4-4b9a-9d0c-58993491bc08-kube-api-access-xpdt5\") pod \"ovn-operator-controller-manager-b6456fdb6-jghkw\" (UID: \"8fd2cc5d-67e4-4b9a-9d0c-58993491bc08\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jghkw" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.474409 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-btm9j\" (UniqueName: \"kubernetes.io/projected/7ff3fbef-1490-4ce9-b350-03a0a7182b78-kube-api-access-btm9j\") pod \"placement-operator-controller-manager-78f8948974-zbcdb\" (UID: \"7ff3fbef-1490-4ce9-b350-03a0a7182b78\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-zbcdb" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.480053 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-2jz8d" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.578515 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-btm9j\" (UniqueName: \"kubernetes.io/projected/7ff3fbef-1490-4ce9-b350-03a0a7182b78-kube-api-access-btm9j\") pod \"placement-operator-controller-manager-78f8948974-zbcdb\" (UID: \"7ff3fbef-1490-4ce9-b350-03a0a7182b78\") " pod="openstack-operators/placement-operator-controller-manager-78f8948974-zbcdb" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.583830 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p7vlw\" (UniqueName: \"kubernetes.io/projected/a69e4847-13bc-4c1e-82a9-546fb11ad38d-kube-api-access-p7vlw\") pod \"octavia-operator-controller-manager-998648c74-2b6j2\" (UID: \"a69e4847-13bc-4c1e-82a9-546fb11ad38d\") " pod="openstack-operators/octavia-operator-controller-manager-998648c74-2b6j2" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.587625 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpdt5\" (UniqueName: \"kubernetes.io/projected/8fd2cc5d-67e4-4b9a-9d0c-58993491bc08-kube-api-access-xpdt5\") pod \"ovn-operator-controller-manager-b6456fdb6-jghkw\" (UID: \"8fd2cc5d-67e4-4b9a-9d0c-58993491bc08\") " pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jghkw" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.592856 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfwxz\" (UniqueName: \"kubernetes.io/projected/0770d71d-11ea-4b63-8a98-31521f395686-kube-api-access-zfwxz\") pod \"telemetry-operator-controller-manager-76cc84c6bb-dsmtk\" (UID: \"0770d71d-11ea-4b63-8a98-31521f395686\") " pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-dsmtk" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.599216 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7t8qs\" (UniqueName: \"kubernetes.io/projected/ccea0c2d-817c-4895-b8a7-bf852bd12aa9-kube-api-access-7t8qs\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl\" (UID: \"ccea0c2d-817c-4895-b8a7-bf852bd12aa9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.599435 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sltp8\" (UniqueName: \"kubernetes.io/projected/e27c6cc8-ef18-421e-8a7b-1b6bb2227724-kube-api-access-sltp8\") pod \"test-operator-controller-manager-5854674fcc-4k7dd\" (UID: \"e27c6cc8-ef18-421e-8a7b-1b6bb2227724\") " pod="openstack-operators/test-operator-controller-manager-5854674fcc-4k7dd" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.600211 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-webhook-certs\") pod \"openstack-operator-controller-manager-777bfdfd44-xwwmn\" (UID: \"619cb5c4-1a5a-4eb8-ad2d-28615e0dc607\") " pod="openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.600333 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9jnb\" (UniqueName: \"kubernetes.io/projected/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-kube-api-access-l9jnb\") pod 
\"openstack-operator-controller-manager-777bfdfd44-xwwmn\" (UID: \"619cb5c4-1a5a-4eb8-ad2d-28615e0dc607\") " pod="openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.600357 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-metrics-certs\") pod \"openstack-operator-controller-manager-777bfdfd44-xwwmn\" (UID: \"619cb5c4-1a5a-4eb8-ad2d-28615e0dc607\") " pod="openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn" Dec 05 17:47:47 crc kubenswrapper[4961]: E1205 17:47:47.600450 4961 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 17:47:47 crc kubenswrapper[4961]: E1205 17:47:47.600487 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-metrics-certs podName:619cb5c4-1a5a-4eb8-ad2d-28615e0dc607 nodeName:}" failed. No retries permitted until 2025-12-05 17:47:48.100475045 +0000 UTC m=+874.161625518 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-metrics-certs") pod "openstack-operator-controller-manager-777bfdfd44-xwwmn" (UID: "619cb5c4-1a5a-4eb8-ad2d-28615e0dc607") : secret "metrics-server-cert" not found Dec 05 17:47:47 crc kubenswrapper[4961]: E1205 17:47:47.600616 4961 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 17:47:47 crc kubenswrapper[4961]: E1205 17:47:47.600853 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-webhook-certs podName:619cb5c4-1a5a-4eb8-ad2d-28615e0dc607 nodeName:}" failed. No retries permitted until 2025-12-05 17:47:48.100841853 +0000 UTC m=+874.161992326 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-webhook-certs") pod "openstack-operator-controller-manager-777bfdfd44-xwwmn" (UID: "619cb5c4-1a5a-4eb8-ad2d-28615e0dc607") : secret "webhook-server-cert" not found Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.601718 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wp9k8\" (UniqueName: \"kubernetes.io/projected/e661480b-d6fd-4c16-9f03-f519092d05c6-kube-api-access-wp9k8\") pod \"nova-operator-controller-manager-697bc559fc-hgv2r\" (UID: \"e661480b-d6fd-4c16-9f03-f519092d05c6\") " pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hgv2r" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.604893 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jghkw" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.629819 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9gvvj\" (UniqueName: \"kubernetes.io/projected/767519b7-2067-4fca-a96b-bf9b02e1b273-kube-api-access-9gvvj\") pod \"swift-operator-controller-manager-5f8c65bbfc-c8x5r\" (UID: \"767519b7-2067-4fca-a96b-bf9b02e1b273\") " pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c8x5r" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.650053 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c8x5r" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.651579 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9jnb\" (UniqueName: \"kubernetes.io/projected/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-kube-api-access-l9jnb\") pod \"openstack-operator-controller-manager-777bfdfd44-xwwmn\" (UID: \"619cb5c4-1a5a-4eb8-ad2d-28615e0dc607\") " pod="openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.659541 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbp7"] Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.660459 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbp7" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.662961 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-controller-manager-dockercfg-sgtqm" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.668912 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbp7"] Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.688191 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/placement-operator-controller-manager-78f8948974-zbcdb" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.714014 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8cdbq\" (UniqueName: \"kubernetes.io/projected/39ba1343-9933-483d-aef2-90e0ceb14c79-kube-api-access-8cdbq\") pod \"rabbitmq-cluster-operator-manager-668c99d594-4dbp7\" (UID: \"39ba1343-9933-483d-aef2-90e0ceb14c79\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbp7" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.750639 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/barbican-operator-controller-manager-7d9dfd778-cnqj9"] Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.798793 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-dsmtk" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.817900 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hgv2r" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.817975 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8cdbq\" (UniqueName: \"kubernetes.io/projected/39ba1343-9933-483d-aef2-90e0ceb14c79-kube-api-access-8cdbq\") pod \"rabbitmq-cluster-operator-manager-668c99d594-4dbp7\" (UID: \"39ba1343-9933-483d-aef2-90e0ceb14c79\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbp7" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.853114 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/octavia-operator-controller-manager-998648c74-2b6j2" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.873388 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8cdbq\" (UniqueName: \"kubernetes.io/projected/39ba1343-9933-483d-aef2-90e0ceb14c79-kube-api-access-8cdbq\") pod \"rabbitmq-cluster-operator-manager-668c99d594-4dbp7\" (UID: \"39ba1343-9933-483d-aef2-90e0ceb14c79\") " pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbp7" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.879945 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/test-operator-controller-manager-5854674fcc-4k7dd" Dec 05 17:47:47 crc kubenswrapper[4961]: I1205 17:47:47.953557 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbp7" Dec 05 17:47:48 crc kubenswrapper[4961]: I1205 17:47:48.026078 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ccea0c2d-817c-4895-b8a7-bf852bd12aa9-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl\" (UID: \"ccea0c2d-817c-4895-b8a7-bf852bd12aa9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl" Dec 05 17:47:48 crc kubenswrapper[4961]: E1205 17:47:48.026341 4961 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 17:47:48 crc kubenswrapper[4961]: E1205 17:47:48.026427 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ccea0c2d-817c-4895-b8a7-bf852bd12aa9-cert podName:ccea0c2d-817c-4895-b8a7-bf852bd12aa9 nodeName:}" failed. No retries permitted until 2025-12-05 17:47:49.026399351 +0000 UTC m=+875.087549824 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ccea0c2d-817c-4895-b8a7-bf852bd12aa9-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl" (UID: "ccea0c2d-817c-4895-b8a7-bf852bd12aa9") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 17:47:48 crc kubenswrapper[4961]: I1205 17:47:48.127204 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-webhook-certs\") pod \"openstack-operator-controller-manager-777bfdfd44-xwwmn\" (UID: \"619cb5c4-1a5a-4eb8-ad2d-28615e0dc607\") " pod="openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn" Dec 05 17:47:48 crc kubenswrapper[4961]: I1205 17:47:48.127398 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-metrics-certs\") pod \"openstack-operator-controller-manager-777bfdfd44-xwwmn\" (UID: \"619cb5c4-1a5a-4eb8-ad2d-28615e0dc607\") " pod="openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn" Dec 05 17:47:48 crc kubenswrapper[4961]: E1205 17:47:48.127416 4961 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 17:47:48 crc kubenswrapper[4961]: E1205 17:47:48.127489 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-webhook-certs podName:619cb5c4-1a5a-4eb8-ad2d-28615e0dc607 nodeName:}" failed. No retries permitted until 2025-12-05 17:47:49.127473392 +0000 UTC m=+875.188623865 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-webhook-certs") pod "openstack-operator-controller-manager-777bfdfd44-xwwmn" (UID: "619cb5c4-1a5a-4eb8-ad2d-28615e0dc607") : secret "webhook-server-cert" not found Dec 05 17:47:48 crc kubenswrapper[4961]: E1205 17:47:48.127587 4961 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 17:47:48 crc kubenswrapper[4961]: E1205 17:47:48.127637 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-metrics-certs podName:619cb5c4-1a5a-4eb8-ad2d-28615e0dc607 nodeName:}" failed. No retries permitted until 2025-12-05 17:47:49.127622426 +0000 UTC m=+875.188772969 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-metrics-certs") pod "openstack-operator-controller-manager-777bfdfd44-xwwmn" (UID: "619cb5c4-1a5a-4eb8-ad2d-28615e0dc607") : secret "metrics-server-cert" not found Dec 05 17:47:48 crc kubenswrapper[4961]: I1205 17:47:48.229049 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/77f7ec48-3abf-4934-a703-fa3f5edfbd27-cert\") pod \"infra-operator-controller-manager-84b9cfc694-lml7r\" (UID: \"77f7ec48-3abf-4934-a703-fa3f5edfbd27\") " pod="openstack-operators/infra-operator-controller-manager-84b9cfc694-lml7r" Dec 05 17:47:48 crc kubenswrapper[4961]: E1205 17:47:48.229903 4961 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 17:47:48 crc kubenswrapper[4961]: E1205 17:47:48.229983 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/77f7ec48-3abf-4934-a703-fa3f5edfbd27-cert podName:77f7ec48-3abf-4934-a703-fa3f5edfbd27 nodeName:}" failed. No retries permitted until 2025-12-05 17:47:50.229945707 +0000 UTC m=+876.291096180 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/77f7ec48-3abf-4934-a703-fa3f5edfbd27-cert") pod "infra-operator-controller-manager-84b9cfc694-lml7r" (UID: "77f7ec48-3abf-4934-a703-fa3f5edfbd27") : secret "infra-operator-webhook-server-cert" not found Dec 05 17:47:48 crc kubenswrapper[4961]: I1205 17:47:48.262943 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-cnqj9" event={"ID":"705eb884-eb46-4d59-86ee-c2f1587d5df4","Type":"ContainerStarted","Data":"fab2a99989f7cfb4be2f125f6897c9ff3cf765a58b190e7dea1b82b931df51fc"} Dec 05 17:47:48 crc kubenswrapper[4961]: I1205 17:47:48.357315 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/heat-operator-controller-manager-5f64f6f8bb-95v4g"] Dec 05 17:47:48 crc kubenswrapper[4961]: I1205 17:47:48.377887 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/designate-operator-controller-manager-78b4bc895b-qnmsr"] Dec 05 17:47:48 crc kubenswrapper[4961]: I1205 17:47:48.385499 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/cinder-operator-controller-manager-859b6ccc6-nw6x8"] Dec 05 17:47:48 crc kubenswrapper[4961]: W1205 17:47:48.397202 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod406dca34_428b_493b_b564_511542c2bad6.slice/crio-d12b7784c90104557b89451f0e77790d3492576d50cce4ccaced3fcd533295b7 WatchSource:0}: Error finding container d12b7784c90104557b89451f0e77790d3492576d50cce4ccaced3fcd533295b7: Status 404 returned error can't find the container with id d12b7784c90104557b89451f0e77790d3492576d50cce4ccaced3fcd533295b7 Dec 05 17:47:48 crc kubenswrapper[4961]: I1205 17:47:48.548316 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-7c79b5df47-q2p4h"] Dec 05 17:47:48 crc kubenswrapper[4961]: W1205 17:47:48.554553 4961 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf9a8d55_8cff_40e2_9f1b_bbd05c3eea80.slice/crio-b162fa9999ae3afaa49b3b5edb57ff9b8e46d404d6848a0ea50c85c14cb88646 WatchSource:0}: Error finding container b162fa9999ae3afaa49b3b5edb57ff9b8e46d404d6848a0ea50c85c14cb88646: Status 404 returned error can't find the container with id b162fa9999ae3afaa49b3b5edb57ff9b8e46d404d6848a0ea50c85c14cb88646 Dec 05 17:47:48 crc kubenswrapper[4961]: I1205 17:47:48.554754 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gd5nk"] Dec 05 17:47:48 crc kubenswrapper[4961]: W1205 17:47:48.559081 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod90df3fec_9bc9_48ca_a432_374c1f7e2002.slice/crio-a06ef5ae3ab5ff3f5116797dcf16a3cc9b02830ffbabda2aaba987e69afddb97 WatchSource:0}: Error finding container a06ef5ae3ab5ff3f5116797dcf16a3cc9b02830ffbabda2aaba987e69afddb97: Status 404 returned error can't find the container with id a06ef5ae3ab5ff3f5116797dcf16a3cc9b02830ffbabda2aaba987e69afddb97 Dec 05 17:47:48 crc kubenswrapper[4961]: I1205 17:47:48.570001 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-4whjc"] Dec 05 17:47:48 crc kubenswrapper[4961]: W1205 17:47:48.571089 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf7757573_1085_4560_880c_3d9b36ce93f7.slice/crio-16324ad1a476e2c159056881d3634251fe0034a391ddc72dc883ae8e9e44dffd WatchSource:0}: Error finding container 16324ad1a476e2c159056881d3634251fe0034a391ddc72dc883ae8e9e44dffd: Status 404 returned error can't find the container with id 16324ad1a476e2c159056881d3634251fe0034a391ddc72dc883ae8e9e44dffd Dec 05 17:47:48 crc kubenswrapper[4961]: I1205 17:47:48.577375 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/glance-operator-controller-manager-77987cd8cd-dlwzz"] Dec 05 17:47:48 crc kubenswrapper[4961]: W1205 17:47:48.580927 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0b1fd140_fbf5_4d64_950b_b0bdcd07ec54.slice/crio-116df6707541a5c7b07ff73e8a48f915e052cba93f687d66080b55a160a7b2d7 WatchSource:0}: Error finding container 116df6707541a5c7b07ff73e8a48f915e052cba93f687d66080b55a160a7b2d7: Status 404 returned error can't find the container with id 116df6707541a5c7b07ff73e8a48f915e052cba93f687d66080b55a160a7b2d7 Dec 05 17:47:48 crc kubenswrapper[4961]: I1205 17:47:48.738579 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/watcher-operator-controller-manager-769dc69bc-2jz8d"] Dec 05 17:47:48 crc kubenswrapper[4961]: W1205 17:47:48.748745 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc567a803_253f_4895_a504_caee7ba37c34.slice/crio-3e7510edc359bf9d3e07e95e61e871ae4d8ae6699d844a656455036dcd459d2b WatchSource:0}: Error finding container 3e7510edc359bf9d3e07e95e61e871ae4d8ae6699d844a656455036dcd459d2b: Status 404 returned error can't find the container with id 3e7510edc359bf9d3e07e95e61e871ae4d8ae6699d844a656455036dcd459d2b Dec 05 17:47:48 crc kubenswrapper[4961]: I1205 17:47:48.948535 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ironic-operator-controller-manager-6c548fd776-zjbvv"] 
Dec 05 17:47:48 crc kubenswrapper[4961]: I1205 17:47:48.972494 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/ovn-operator-controller-manager-b6456fdb6-jghkw"] Dec 05 17:47:48 crc kubenswrapper[4961]: I1205 17:47:48.994875 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-pszjn"] Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.013471 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-7765d96ddf-bnjfr"] Dec 05 17:47:49 crc kubenswrapper[4961]: W1205 17:47:49.014942 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda69e4847_13bc_4c1e_82a9_546fb11ad38d.slice/crio-592ef8666ceefa2f631fa122bff30c23e4c1e65e2e168089348964ad997a34da WatchSource:0}: Error finding container 592ef8666ceefa2f631fa122bff30c23e4c1e65e2e168089348964ad997a34da: Status 404 returned error can't find the container with id 592ef8666ceefa2f631fa122bff30c23e4c1e65e2e168089348964ad997a34da Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.028294 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-p7vlw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
octavia-operator-controller-manager-998648c74-2b6j2_openstack-operators(a69e4847-13bc-4c1e-82a9-546fb11ad38d): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.032848 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/placement-operator-controller-manager-78f8948974-zbcdb"] Dec 05 17:47:49 crc kubenswrapper[4961]: W1205 17:47:49.033658 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode27c6cc8_ef18_421e_8a7b_1b6bb2227724.slice/crio-c9be913483e8afe66d0edd33a009fd9d0bb5d9a9c963142e199559f4beb02c61 WatchSource:0}: Error finding container c9be913483e8afe66d0edd33a009fd9d0bb5d9a9c963142e199559f4beb02c61: Status 404 returned error can't find the container with id c9be913483e8afe66d0edd33a009fd9d0bb5d9a9c963142e199559f4beb02c61 Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.033857 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-p7vlw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod octavia-operator-controller-manager-998648c74-2b6j2_openstack-operators(a69e4847-13bc-4c1e-82a9-546fb11ad38d): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.035076 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-2b6j2" podUID="a69e4847-13bc-4c1e-82a9-546fb11ad38d" Dec 05 17:47:49 crc kubenswrapper[4961]: W1205 17:47:49.035180 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod767519b7_2067_4fca_a96b_bf9b02e1b273.slice/crio-29f7a2ba31e067218e910b7daf18ab1b339ffd444300cb6bd5f224b215180ec0 WatchSource:0}: Error finding container 29f7a2ba31e067218e910b7daf18ab1b339ffd444300cb6bd5f224b215180ec0: Status 404 returned error can't find the container with id 
29f7a2ba31e067218e910b7daf18ab1b339ffd444300cb6bd5f224b215180ec0 Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.035725 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-sltp8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-4k7dd_openstack-operators(e27c6cc8-ef18-421e-8a7b-1b6bb2227724): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 17:47:49 crc kubenswrapper[4961]: W1205 17:47:49.039296 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod39ba1343_9933_483d_aef2_90e0ceb14c79.slice/crio-56c68ea884483e57c3c2afe28c3979aea399f3c21b9a62fe8186872b82e1e6b3 WatchSource:0}: Error finding container 56c68ea884483e57c3c2afe28c3979aea399f3c21b9a62fe8186872b82e1e6b3: Status 404 returned error can't find the container with id 56c68ea884483e57c3c2afe28c3979aea399f3c21b9a62fe8186872b82e1e6b3 Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.039992 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true 
--v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-sltp8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod test-operator-controller-manager-5854674fcc-4k7dd_openstack-operators(e27c6cc8-ef18-421e-8a7b-1b6bb2227724): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.040019 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-dsmtk"] Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.040711 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9gvvj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-c8x5r_openstack-operators(767519b7-2067-4fca-a96b-bf9b02e1b273): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.043153 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-4k7dd" podUID="e27c6cc8-ef18-421e-8a7b-1b6bb2227724" Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.043996 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ccea0c2d-817c-4895-b8a7-bf852bd12aa9-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl\" (UID: \"ccea0c2d-817c-4895-b8a7-bf852bd12aa9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl" Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.044187 4961 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.044240 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ccea0c2d-817c-4895-b8a7-bf852bd12aa9-cert podName:ccea0c2d-817c-4895-b8a7-bf852bd12aa9 nodeName:}" failed. No retries permitted until 2025-12-05 17:47:51.044223515 +0000 UTC m=+877.105373988 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ccea0c2d-817c-4895-b8a7-bf852bd12aa9-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl" (UID: "ccea0c2d-817c-4895-b8a7-bf852bd12aa9") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.045918 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/octavia-operator-controller-manager-998648c74-2b6j2"] Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.046216 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-9gvvj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-operator-controller-manager-5f8c65bbfc-c8x5r_openstack-operators(767519b7-2067-4fca-a96b-bf9b02e1b273): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.046313 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:operator,Image:quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2,Command:[/manager],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:metrics,HostPort:0,ContainerPort:9782,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:OPERATOR_NAMESPACE,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:metadata.namespace,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{200 -3} {} 200m DecimalSI},memory: {{524288000 0} {} 500Mi BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8cdbq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000660000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cluster-operator-manager-668c99d594-4dbp7_openstack-operators(39ba1343-9933-483d-aef2-90e0ceb14c79): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.047285 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c8x5r" podUID="767519b7-2067-4fca-a96b-bf9b02e1b273" Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.047476 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ErrImagePull: \"pull QPS exceeded\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbp7" podUID="39ba1343-9933-483d-aef2-90e0ceb14c79" Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.049117 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wp9k8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod nova-operator-controller-manager-697bc559fc-hgv2r_openstack-operators(e661480b-d6fd-4c16-9f03-f519092d05c6): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.049473 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zfwxz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-dsmtk_openstack-operators(0770d71d-11ea-4b63-8a98-31521f395686): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 17:47:49 crc 
kubenswrapper[4961]: I1205 17:47:49.050359 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c8x5r"] Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.052965 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-zfwxz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod telemetry-operator-controller-manager-76cc84c6bb-dsmtk_openstack-operators(0770d71d-11ea-4b63-8a98-31521f395686): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.053095 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:kube-rbac-proxy,Image:quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0,Command:[],Args:[--secure-listen-address=0.0.0.0:8443 --upstream=http://127.0.0.1:8080/ --logtostderr=true --v=0],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:https,HostPort:0,ContainerPort:8443,Protocol:TCP,HostIP:,},},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{134217728 0} {} BinarySI},},Requests:ResourceList{cpu: {{5 -3} {} 5m DecimalSI},memory: {{67108864 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-wp9k8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod 
nova-operator-controller-manager-697bc559fc-hgv2r_openstack-operators(e661480b-d6fd-4c16-9f03-f519092d05c6): ErrImagePull: pull QPS exceeded" logger="UnhandledError" Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.054465 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hgv2r" podUID="e661480b-d6fd-4c16-9f03-f519092d05c6" Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.054517 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"pull QPS exceeded\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ErrImagePull: \"pull QPS exceeded\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-dsmtk" podUID="0770d71d-11ea-4b63-8a98-31521f395686" Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.070165 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbp7"] Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.081046 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/test-operator-controller-manager-5854674fcc-4k7dd"] Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.096054 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/nova-operator-controller-manager-697bc559fc-hgv2r"] Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.145036 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-metrics-certs\") pod \"openstack-operator-controller-manager-777bfdfd44-xwwmn\" (UID: \"619cb5c4-1a5a-4eb8-ad2d-28615e0dc607\") " pod="openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn" Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.145085 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-webhook-certs\") pod \"openstack-operator-controller-manager-777bfdfd44-xwwmn\" (UID: \"619cb5c4-1a5a-4eb8-ad2d-28615e0dc607\") " pod="openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn" Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.145185 4961 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.145223 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-webhook-certs podName:619cb5c4-1a5a-4eb8-ad2d-28615e0dc607 nodeName:}" failed. No retries permitted until 2025-12-05 17:47:51.145210874 +0000 UTC m=+877.206361347 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-webhook-certs") pod "openstack-operator-controller-manager-777bfdfd44-xwwmn" (UID: "619cb5c4-1a5a-4eb8-ad2d-28615e0dc607") : secret "webhook-server-cert" not found Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.145498 4961 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.145526 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-metrics-certs podName:619cb5c4-1a5a-4eb8-ad2d-28615e0dc607 nodeName:}" failed. No retries permitted until 2025-12-05 17:47:51.145518681 +0000 UTC m=+877.206669154 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-metrics-certs") pod "openstack-operator-controller-manager-777bfdfd44-xwwmn" (UID: "619cb5c4-1a5a-4eb8-ad2d-28615e0dc607") : secret "metrics-server-cert" not found Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.269217 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bnjfr" event={"ID":"0499fdb9-20d5-445c-9ca0-4492287fbcc0","Type":"ContainerStarted","Data":"54a24481d810b77d7b8d987910a13ab2f11886a7defd9ab63ed8c83ff0bb95d4"} Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.271473 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-qnmsr" event={"ID":"6a4cdfbf-8697-4f8f-9d07-b5aaa5e05991","Type":"ContainerStarted","Data":"6da9615804eee4a1637946fc077fa1c59612ebb27b610ccae6c6cf0a1abc1a3b"} Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.272406 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-q2p4h" event={"ID":"90df3fec-9bc9-48ca-a432-374c1f7e2002","Type":"ContainerStarted","Data":"a06ef5ae3ab5ff3f5116797dcf16a3cc9b02830ffbabda2aaba987e69afddb97"} Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.273346 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-nw6x8" event={"ID":"406dca34-428b-493b-b564-511542c2bad6","Type":"ContainerStarted","Data":"d12b7784c90104557b89451f0e77790d3492576d50cce4ccaced3fcd533295b7"} Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.274181 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-4k7dd" event={"ID":"e27c6cc8-ef18-421e-8a7b-1b6bb2227724","Type":"ContainerStarted","Data":"c9be913483e8afe66d0edd33a009fd9d0bb5d9a9c963142e199559f4beb02c61"} Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.275215 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gd5nk" event={"ID":"af9a8d55-8cff-40e2-9f1b-bbd05c3eea80","Type":"ContainerStarted","Data":"b162fa9999ae3afaa49b3b5edb57ff9b8e46d404d6848a0ea50c85c14cb88646"} Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.276244 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbp7" 
event={"ID":"39ba1343-9933-483d-aef2-90e0ceb14c79","Type":"ContainerStarted","Data":"56c68ea884483e57c3c2afe28c3979aea399f3c21b9a62fe8186872b82e1e6b3"} Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.277502 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbp7" podUID="39ba1343-9933-483d-aef2-90e0ceb14c79" Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.277681 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-dsmtk" event={"ID":"0770d71d-11ea-4b63-8a98-31521f395686","Type":"ContainerStarted","Data":"f5a5adbc0638102dedae51591bf916c0d995a4ba062330d670cf20a03d469e67"} Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.278119 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-4k7dd" podUID="e27c6cc8-ef18-421e-8a7b-1b6bb2227724" Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.281720 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-dsmtk" podUID="0770d71d-11ea-4b63-8a98-31521f395686" Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.281868 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-zjbvv" event={"ID":"2bb86b5f-1ee1-48c0-bcc1-60ca583c1339","Type":"ContainerStarted","Data":"631b6ccc474adbfa255e8e3f536df48687e6e93dcc082fe4317bf2ab8c1d2087"} Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.286146 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-2b6j2" event={"ID":"a69e4847-13bc-4c1e-82a9-546fb11ad38d","Type":"ContainerStarted","Data":"592ef8666ceefa2f631fa122bff30c23e4c1e65e2e168089348964ad997a34da"} Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.287290 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-4whjc" event={"ID":"f7757573-1085-4560-880c-3d9b36ce93f7","Type":"ContainerStarted","Data":"16324ad1a476e2c159056881d3634251fe0034a391ddc72dc883ae8e9e44dffd"} Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.288266 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-2b6j2" podUID="a69e4847-13bc-4c1e-82a9-546fb11ad38d" Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.288539 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-pszjn" event={"ID":"f7ff9bd4-8a05-4a50-b38b-701451107b9f","Type":"ContainerStarted","Data":"f0e061182953c0fabb0c67c2ef2dafdbe679bd7cd78357298cbf94d1dd62376f"} Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.289751 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-2jz8d" event={"ID":"c567a803-253f-4895-a504-caee7ba37c34","Type":"ContainerStarted","Data":"3e7510edc359bf9d3e07e95e61e871ae4d8ae6699d844a656455036dcd459d2b"} Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.297259 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-dlwzz" event={"ID":"0b1fd140-fbf5-4d64-950b-b0bdcd07ec54","Type":"ContainerStarted","Data":"116df6707541a5c7b07ff73e8a48f915e052cba93f687d66080b55a160a7b2d7"} Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.299537 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-zbcdb" event={"ID":"7ff3fbef-1490-4ce9-b350-03a0a7182b78","Type":"ContainerStarted","Data":"cd9114d06fc083045dc495ddbfc0df614bb336c00904a22d9d648dec662dfc74"} Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.303404 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jghkw" event={"ID":"8fd2cc5d-67e4-4b9a-9d0c-58993491bc08","Type":"ContainerStarted","Data":"5414a29d0bb3eda68a5f5e7effa7aac6792f95341be793190813ead7cb743877"} Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.306298 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c8x5r" event={"ID":"767519b7-2067-4fca-a96b-bf9b02e1b273","Type":"ContainerStarted","Data":"29f7a2ba31e067218e910b7daf18ab1b339ffd444300cb6bd5f224b215180ec0"} Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.309308 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c8x5r" podUID="767519b7-2067-4fca-a96b-bf9b02e1b273" Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 17:47:49.309582 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hgv2r" event={"ID":"e661480b-d6fd-4c16-9f03-f519092d05c6","Type":"ContainerStarted","Data":"f72e65f14fcdeb04f8b80aeb8eb86ff86475dd7b3479874c3be4783a422aa999"} Dec 05 17:47:49 crc kubenswrapper[4961]: I1205 
17:47:49.310907 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-95v4g" event={"ID":"4d42ce43-3c27-4007-a20b-e0068beb2490","Type":"ContainerStarted","Data":"aef45d41cda395d9fe1fcb4866854b63e3e90589901fb6c311c28f33c4ff1ab1"} Dec 05 17:47:49 crc kubenswrapper[4961]: E1205 17:47:49.314010 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hgv2r" podUID="e661480b-d6fd-4c16-9f03-f519092d05c6" Dec 05 17:47:50 crc kubenswrapper[4961]: I1205 17:47:50.019261 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pqstj" Dec 05 17:47:50 crc kubenswrapper[4961]: I1205 17:47:50.019314 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pqstj" Dec 05 17:47:50 crc kubenswrapper[4961]: I1205 17:47:50.092191 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pqstj" Dec 05 17:47:50 crc kubenswrapper[4961]: I1205 17:47:50.259649 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/77f7ec48-3abf-4934-a703-fa3f5edfbd27-cert\") pod \"infra-operator-controller-manager-84b9cfc694-lml7r\" (UID: \"77f7ec48-3abf-4934-a703-fa3f5edfbd27\") " pod="openstack-operators/infra-operator-controller-manager-84b9cfc694-lml7r" Dec 05 17:47:50 crc kubenswrapper[4961]: E1205 17:47:50.259875 4961 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 17:47:50 crc kubenswrapper[4961]: E1205 17:47:50.259975 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/77f7ec48-3abf-4934-a703-fa3f5edfbd27-cert podName:77f7ec48-3abf-4934-a703-fa3f5edfbd27 nodeName:}" failed. No retries permitted until 2025-12-05 17:47:54.259950626 +0000 UTC m=+880.321101099 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/77f7ec48-3abf-4934-a703-fa3f5edfbd27-cert") pod "infra-operator-controller-manager-84b9cfc694-lml7r" (UID: "77f7ec48-3abf-4934-a703-fa3f5edfbd27") : secret "infra-operator-webhook-server-cert" not found Dec 05 17:47:50 crc kubenswrapper[4961]: E1205 17:47:50.320241 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"operator\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/rabbitmq-cluster-operator@sha256:893e66303c1b0bc1d00a299a3f0380bad55c8dc813c8a1c6a4aab379f5aa12a2\\\"\"" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbp7" podUID="39ba1343-9933-483d-aef2-90e0ceb14c79" Dec 05 17:47:50 crc kubenswrapper[4961]: E1205 17:47:50.320240 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/swift-operator@sha256:2a3d21728a8bfb4e64617e63e61e2d1cb70a383ea3e8f846e0c3c3c02d2b0a9d\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c8x5r" podUID="767519b7-2067-4fca-a96b-bf9b02e1b273" Dec 05 17:47:50 crc kubenswrapper[4961]: E1205 17:47:50.320735 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/octavia-operator@sha256:d9a3694865a7d54ee96397add18c3898886e98d079aa20876a0f4de1fa7a7168\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/octavia-operator-controller-manager-998648c74-2b6j2" podUID="a69e4847-13bc-4c1e-82a9-546fb11ad38d" Dec 05 17:47:50 crc kubenswrapper[4961]: E1205 17:47:50.320752 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/telemetry-operator@sha256:7d66757c0af67104f0389e851a7cc0daa44443ad202d157417bd86bbb57cc385\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-dsmtk" podUID="0770d71d-11ea-4b63-8a98-31521f395686" Dec 05 17:47:50 crc kubenswrapper[4961]: E1205 17:47:50.321166 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/nova-operator@sha256:779f0cee6024d0fb8f259b036fe790e62aa5a3b0431ea9bf15a6e7d02e2e5670\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hgv2r" podUID="e661480b-d6fd-4c16-9f03-f519092d05c6" Dec 05 17:47:50 crc kubenswrapper[4961]: E1205 17:47:50.321427 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"manager\" with ImagePullBackOff: \"Back-off pulling 
image \\\"quay.io/openstack-k8s-operators/test-operator@sha256:101b3e007d8c9f2e183262d7712f986ad51256448099069bc14f1ea5f997ab94\\\"\", failed to \"StartContainer\" for \"kube-rbac-proxy\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/openstack-k8s-operators/kube-rbac-proxy:v0.16.0\\\"\"]" pod="openstack-operators/test-operator-controller-manager-5854674fcc-4k7dd" podUID="e27c6cc8-ef18-421e-8a7b-1b6bb2227724" Dec 05 17:47:50 crc kubenswrapper[4961]: I1205 17:47:50.402524 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pqstj" Dec 05 17:47:50 crc kubenswrapper[4961]: I1205 17:47:50.467603 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqstj"] Dec 05 17:47:51 crc kubenswrapper[4961]: I1205 17:47:51.090645 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ccea0c2d-817c-4895-b8a7-bf852bd12aa9-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl\" (UID: \"ccea0c2d-817c-4895-b8a7-bf852bd12aa9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl" Dec 05 17:47:51 crc kubenswrapper[4961]: E1205 17:47:51.090886 4961 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 17:47:51 crc kubenswrapper[4961]: E1205 17:47:51.091174 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ccea0c2d-817c-4895-b8a7-bf852bd12aa9-cert podName:ccea0c2d-817c-4895-b8a7-bf852bd12aa9 nodeName:}" failed. No retries permitted until 2025-12-05 17:47:55.091149572 +0000 UTC m=+881.152300075 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ccea0c2d-817c-4895-b8a7-bf852bd12aa9-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl" (UID: "ccea0c2d-817c-4895-b8a7-bf852bd12aa9") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 17:47:51 crc kubenswrapper[4961]: I1205 17:47:51.192768 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-metrics-certs\") pod \"openstack-operator-controller-manager-777bfdfd44-xwwmn\" (UID: \"619cb5c4-1a5a-4eb8-ad2d-28615e0dc607\") " pod="openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn" Dec 05 17:47:51 crc kubenswrapper[4961]: I1205 17:47:51.192890 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-webhook-certs\") pod \"openstack-operator-controller-manager-777bfdfd44-xwwmn\" (UID: \"619cb5c4-1a5a-4eb8-ad2d-28615e0dc607\") " pod="openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn" Dec 05 17:47:51 crc kubenswrapper[4961]: E1205 17:47:51.193088 4961 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 17:47:51 crc kubenswrapper[4961]: E1205 17:47:51.193212 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-webhook-certs podName:619cb5c4-1a5a-4eb8-ad2d-28615e0dc607 nodeName:}" failed. 
No retries permitted until 2025-12-05 17:47:55.193175986 +0000 UTC m=+881.254326519 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-webhook-certs") pod "openstack-operator-controller-manager-777bfdfd44-xwwmn" (UID: "619cb5c4-1a5a-4eb8-ad2d-28615e0dc607") : secret "webhook-server-cert" not found Dec 05 17:47:51 crc kubenswrapper[4961]: E1205 17:47:51.193693 4961 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 17:47:51 crc kubenswrapper[4961]: E1205 17:47:51.193756 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-metrics-certs podName:619cb5c4-1a5a-4eb8-ad2d-28615e0dc607 nodeName:}" failed. No retries permitted until 2025-12-05 17:47:55.19373982 +0000 UTC m=+881.254890303 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-metrics-certs") pod "openstack-operator-controller-manager-777bfdfd44-xwwmn" (UID: "619cb5c4-1a5a-4eb8-ad2d-28615e0dc607") : secret "metrics-server-cert" not found Dec 05 17:47:52 crc kubenswrapper[4961]: I1205 17:47:52.332465 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pqstj" podUID="4ebb57ac-af33-476b-afed-3f23988fe0da" containerName="registry-server" containerID="cri-o://4e36fa3a09430bf0378f66d0927d68677980080cb07517a431051e27155c835d" gracePeriod=2 Dec 05 17:47:53 crc kubenswrapper[4961]: I1205 17:47:53.342010 4961 generic.go:334] "Generic (PLEG): container finished" podID="4ebb57ac-af33-476b-afed-3f23988fe0da" containerID="4e36fa3a09430bf0378f66d0927d68677980080cb07517a431051e27155c835d" exitCode=0 Dec 05 17:47:53 crc kubenswrapper[4961]: I1205 17:47:53.342053 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqstj" event={"ID":"4ebb57ac-af33-476b-afed-3f23988fe0da","Type":"ContainerDied","Data":"4e36fa3a09430bf0378f66d0927d68677980080cb07517a431051e27155c835d"} Dec 05 17:47:54 crc kubenswrapper[4961]: I1205 17:47:54.343140 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/77f7ec48-3abf-4934-a703-fa3f5edfbd27-cert\") pod \"infra-operator-controller-manager-84b9cfc694-lml7r\" (UID: \"77f7ec48-3abf-4934-a703-fa3f5edfbd27\") " pod="openstack-operators/infra-operator-controller-manager-84b9cfc694-lml7r" Dec 05 17:47:54 crc kubenswrapper[4961]: E1205 17:47:54.343324 4961 secret.go:188] Couldn't get secret openstack-operators/infra-operator-webhook-server-cert: secret "infra-operator-webhook-server-cert" not found Dec 05 17:47:54 crc kubenswrapper[4961]: E1205 17:47:54.343649 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/77f7ec48-3abf-4934-a703-fa3f5edfbd27-cert podName:77f7ec48-3abf-4934-a703-fa3f5edfbd27 nodeName:}" failed. No retries permitted until 2025-12-05 17:48:02.343622328 +0000 UTC m=+888.404772801 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/77f7ec48-3abf-4934-a703-fa3f5edfbd27-cert") pod "infra-operator-controller-manager-84b9cfc694-lml7r" (UID: "77f7ec48-3abf-4934-a703-fa3f5edfbd27") : secret "infra-operator-webhook-server-cert" not found Dec 05 17:47:55 crc kubenswrapper[4961]: I1205 17:47:55.156116 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ccea0c2d-817c-4895-b8a7-bf852bd12aa9-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl\" (UID: \"ccea0c2d-817c-4895-b8a7-bf852bd12aa9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl" Dec 05 17:47:55 crc kubenswrapper[4961]: E1205 17:47:55.156261 4961 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 17:47:55 crc kubenswrapper[4961]: E1205 17:47:55.156312 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ccea0c2d-817c-4895-b8a7-bf852bd12aa9-cert podName:ccea0c2d-817c-4895-b8a7-bf852bd12aa9 nodeName:}" failed. No retries permitted until 2025-12-05 17:48:03.156298186 +0000 UTC m=+889.217448659 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ccea0c2d-817c-4895-b8a7-bf852bd12aa9-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl" (UID: "ccea0c2d-817c-4895-b8a7-bf852bd12aa9") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 17:47:55 crc kubenswrapper[4961]: I1205 17:47:55.257110 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-webhook-certs\") pod \"openstack-operator-controller-manager-777bfdfd44-xwwmn\" (UID: \"619cb5c4-1a5a-4eb8-ad2d-28615e0dc607\") " pod="openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn" Dec 05 17:47:55 crc kubenswrapper[4961]: I1205 17:47:55.257246 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-metrics-certs\") pod \"openstack-operator-controller-manager-777bfdfd44-xwwmn\" (UID: \"619cb5c4-1a5a-4eb8-ad2d-28615e0dc607\") " pod="openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn" Dec 05 17:47:55 crc kubenswrapper[4961]: E1205 17:47:55.257270 4961 secret.go:188] Couldn't get secret openstack-operators/webhook-server-cert: secret "webhook-server-cert" not found Dec 05 17:47:55 crc kubenswrapper[4961]: E1205 17:47:55.257323 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-webhook-certs podName:619cb5c4-1a5a-4eb8-ad2d-28615e0dc607 nodeName:}" failed. No retries permitted until 2025-12-05 17:48:03.257307535 +0000 UTC m=+889.318458008 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "webhook-certs" (UniqueName: "kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-webhook-certs") pod "openstack-operator-controller-manager-777bfdfd44-xwwmn" (UID: "619cb5c4-1a5a-4eb8-ad2d-28615e0dc607") : secret "webhook-server-cert" not found Dec 05 17:47:55 crc kubenswrapper[4961]: E1205 17:47:55.257323 4961 secret.go:188] Couldn't get secret openstack-operators/metrics-server-cert: secret "metrics-server-cert" not found Dec 05 17:47:55 crc kubenswrapper[4961]: E1205 17:47:55.257354 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-metrics-certs podName:619cb5c4-1a5a-4eb8-ad2d-28615e0dc607 nodeName:}" failed. No retries permitted until 2025-12-05 17:48:03.257345746 +0000 UTC m=+889.318496219 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-metrics-certs") pod "openstack-operator-controller-manager-777bfdfd44-xwwmn" (UID: "619cb5c4-1a5a-4eb8-ad2d-28615e0dc607") : secret "metrics-server-cert" not found Dec 05 17:48:00 crc kubenswrapper[4961]: E1205 17:48:00.020332 4961 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4e36fa3a09430bf0378f66d0927d68677980080cb07517a431051e27155c835d is running failed: container process not found" containerID="4e36fa3a09430bf0378f66d0927d68677980080cb07517a431051e27155c835d" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 17:48:00 crc kubenswrapper[4961]: E1205 17:48:00.021672 4961 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4e36fa3a09430bf0378f66d0927d68677980080cb07517a431051e27155c835d is running failed: container process not found" containerID="4e36fa3a09430bf0378f66d0927d68677980080cb07517a431051e27155c835d" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 17:48:00 crc kubenswrapper[4961]: E1205 17:48:00.022364 4961 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4e36fa3a09430bf0378f66d0927d68677980080cb07517a431051e27155c835d is running failed: container process not found" containerID="4e36fa3a09430bf0378f66d0927d68677980080cb07517a431051e27155c835d" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 17:48:00 crc kubenswrapper[4961]: E1205 17:48:00.022404 4961 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4e36fa3a09430bf0378f66d0927d68677980080cb07517a431051e27155c835d is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-marketplace-pqstj" podUID="4ebb57ac-af33-476b-afed-3f23988fe0da" containerName="registry-server" Dec 05 17:48:02 crc kubenswrapper[4961]: I1205 17:48:02.364594 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/77f7ec48-3abf-4934-a703-fa3f5edfbd27-cert\") pod \"infra-operator-controller-manager-84b9cfc694-lml7r\" (UID: \"77f7ec48-3abf-4934-a703-fa3f5edfbd27\") " pod="openstack-operators/infra-operator-controller-manager-84b9cfc694-lml7r" Dec 05 17:48:02 crc kubenswrapper[4961]: I1205 17:48:02.371494 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: 
\"kubernetes.io/secret/77f7ec48-3abf-4934-a703-fa3f5edfbd27-cert\") pod \"infra-operator-controller-manager-84b9cfc694-lml7r\" (UID: \"77f7ec48-3abf-4934-a703-fa3f5edfbd27\") " pod="openstack-operators/infra-operator-controller-manager-84b9cfc694-lml7r" Dec 05 17:48:02 crc kubenswrapper[4961]: I1205 17:48:02.661552 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-84b9cfc694-lml7r" Dec 05 17:48:03 crc kubenswrapper[4961]: E1205 17:48:03.177252 4961 secret.go:188] Couldn't get secret openstack-operators/openstack-baremetal-operator-webhook-server-cert: secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 17:48:03 crc kubenswrapper[4961]: E1205 17:48:03.177383 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/ccea0c2d-817c-4895-b8a7-bf852bd12aa9-cert podName:ccea0c2d-817c-4895-b8a7-bf852bd12aa9 nodeName:}" failed. No retries permitted until 2025-12-05 17:48:19.177350702 +0000 UTC m=+905.238501215 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/ccea0c2d-817c-4895-b8a7-bf852bd12aa9-cert") pod "openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl" (UID: "ccea0c2d-817c-4895-b8a7-bf852bd12aa9") : secret "openstack-baremetal-operator-webhook-server-cert" not found Dec 05 17:48:03 crc kubenswrapper[4961]: I1205 17:48:03.177074 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ccea0c2d-817c-4895-b8a7-bf852bd12aa9-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl\" (UID: \"ccea0c2d-817c-4895-b8a7-bf852bd12aa9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl" Dec 05 17:48:03 crc kubenswrapper[4961]: I1205 17:48:03.183421 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jjckm"] Dec 05 17:48:03 crc kubenswrapper[4961]: I1205 17:48:03.185139 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jjckm" Dec 05 17:48:03 crc kubenswrapper[4961]: I1205 17:48:03.197703 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jjckm"] Dec 05 17:48:03 crc kubenswrapper[4961]: I1205 17:48:03.279473 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b749bb70-6d63-47f6-8a9d-4b016baa1e49-catalog-content\") pod \"certified-operators-jjckm\" (UID: \"b749bb70-6d63-47f6-8a9d-4b016baa1e49\") " pod="openshift-marketplace/certified-operators-jjckm" Dec 05 17:48:03 crc kubenswrapper[4961]: I1205 17:48:03.279559 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-webhook-certs\") pod \"openstack-operator-controller-manager-777bfdfd44-xwwmn\" (UID: \"619cb5c4-1a5a-4eb8-ad2d-28615e0dc607\") " pod="openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn" Dec 05 17:48:03 crc kubenswrapper[4961]: I1205 17:48:03.279658 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zv9zr\" (UniqueName: \"kubernetes.io/projected/b749bb70-6d63-47f6-8a9d-4b016baa1e49-kube-api-access-zv9zr\") pod \"certified-operators-jjckm\" (UID: \"b749bb70-6d63-47f6-8a9d-4b016baa1e49\") " pod="openshift-marketplace/certified-operators-jjckm" Dec 05 17:48:03 crc kubenswrapper[4961]: I1205 17:48:03.279697 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b749bb70-6d63-47f6-8a9d-4b016baa1e49-utilities\") pod \"certified-operators-jjckm\" (UID: \"b749bb70-6d63-47f6-8a9d-4b016baa1e49\") " pod="openshift-marketplace/certified-operators-jjckm" Dec 05 17:48:03 crc kubenswrapper[4961]: I1205 17:48:03.279745 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-metrics-certs\") pod \"openstack-operator-controller-manager-777bfdfd44-xwwmn\" (UID: \"619cb5c4-1a5a-4eb8-ad2d-28615e0dc607\") " pod="openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn" Dec 05 17:48:03 crc kubenswrapper[4961]: I1205 17:48:03.283630 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-metrics-certs\") pod \"openstack-operator-controller-manager-777bfdfd44-xwwmn\" (UID: \"619cb5c4-1a5a-4eb8-ad2d-28615e0dc607\") " pod="openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn" Dec 05 17:48:03 crc kubenswrapper[4961]: I1205 17:48:03.284952 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/619cb5c4-1a5a-4eb8-ad2d-28615e0dc607-webhook-certs\") pod \"openstack-operator-controller-manager-777bfdfd44-xwwmn\" (UID: \"619cb5c4-1a5a-4eb8-ad2d-28615e0dc607\") " pod="openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn" Dec 05 17:48:03 crc kubenswrapper[4961]: I1205 17:48:03.380438 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b749bb70-6d63-47f6-8a9d-4b016baa1e49-utilities\") pod \"certified-operators-jjckm\" (UID: 
\"b749bb70-6d63-47f6-8a9d-4b016baa1e49\") " pod="openshift-marketplace/certified-operators-jjckm" Dec 05 17:48:03 crc kubenswrapper[4961]: I1205 17:48:03.380499 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b749bb70-6d63-47f6-8a9d-4b016baa1e49-catalog-content\") pod \"certified-operators-jjckm\" (UID: \"b749bb70-6d63-47f6-8a9d-4b016baa1e49\") " pod="openshift-marketplace/certified-operators-jjckm" Dec 05 17:48:03 crc kubenswrapper[4961]: I1205 17:48:03.380998 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b749bb70-6d63-47f6-8a9d-4b016baa1e49-utilities\") pod \"certified-operators-jjckm\" (UID: \"b749bb70-6d63-47f6-8a9d-4b016baa1e49\") " pod="openshift-marketplace/certified-operators-jjckm" Dec 05 17:48:03 crc kubenswrapper[4961]: I1205 17:48:03.381060 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b749bb70-6d63-47f6-8a9d-4b016baa1e49-catalog-content\") pod \"certified-operators-jjckm\" (UID: \"b749bb70-6d63-47f6-8a9d-4b016baa1e49\") " pod="openshift-marketplace/certified-operators-jjckm" Dec 05 17:48:03 crc kubenswrapper[4961]: I1205 17:48:03.381393 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zv9zr\" (UniqueName: \"kubernetes.io/projected/b749bb70-6d63-47f6-8a9d-4b016baa1e49-kube-api-access-zv9zr\") pod \"certified-operators-jjckm\" (UID: \"b749bb70-6d63-47f6-8a9d-4b016baa1e49\") " pod="openshift-marketplace/certified-operators-jjckm" Dec 05 17:48:03 crc kubenswrapper[4961]: I1205 17:48:03.399496 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zv9zr\" (UniqueName: \"kubernetes.io/projected/b749bb70-6d63-47f6-8a9d-4b016baa1e49-kube-api-access-zv9zr\") pod \"certified-operators-jjckm\" (UID: \"b749bb70-6d63-47f6-8a9d-4b016baa1e49\") " pod="openshift-marketplace/certified-operators-jjckm" Dec 05 17:48:03 crc kubenswrapper[4961]: I1205 17:48:03.523889 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jjckm" Dec 05 17:48:03 crc kubenswrapper[4961]: I1205 17:48:03.529245 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn" Dec 05 17:48:08 crc kubenswrapper[4961]: E1205 17:48:08.541740 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ironic-operator@sha256:0f523b7e2fa9e86fef986acf07d0c42d5658c475d565f11eaea926ebffcb6530" Dec 05 17:48:08 crc kubenswrapper[4961]: E1205 17:48:08.542952 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ironic-operator@sha256:0f523b7e2fa9e86fef986acf07d0c42d5658c475d565f11eaea926ebffcb6530,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-cswtv,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ironic-operator-controller-manager-6c548fd776-zjbvv_openstack-operators(2bb86b5f-1ee1-48c0-bcc1-60ca583c1339): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:48:09 crc kubenswrapper[4961]: E1205 17:48:09.355299 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/cinder-operator@sha256:1d60701214b39cdb0fa70bbe5710f9b131139a9f4b482c2db4058a04daefb801" Dec 05 17:48:09 crc kubenswrapper[4961]: E1205 17:48:09.355802 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/cinder-operator@sha256:1d60701214b39cdb0fa70bbe5710f9b131139a9f4b482c2db4058a04daefb801,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-m5cjz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-operator-controller-manager-859b6ccc6-nw6x8_openstack-operators(406dca34-428b-493b-b564-511542c2bad6): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:48:10 crc kubenswrapper[4961]: E1205 17:48:10.020501 4961 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4e36fa3a09430bf0378f66d0927d68677980080cb07517a431051e27155c835d is running failed: container process not found" containerID="4e36fa3a09430bf0378f66d0927d68677980080cb07517a431051e27155c835d" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 17:48:10 crc kubenswrapper[4961]: E1205 17:48:10.021564 4961 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4e36fa3a09430bf0378f66d0927d68677980080cb07517a431051e27155c835d is running failed: container process not found" containerID="4e36fa3a09430bf0378f66d0927d68677980080cb07517a431051e27155c835d" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 17:48:10 crc kubenswrapper[4961]: E1205 17:48:10.022615 4961 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or 
running: checking if PID of 4e36fa3a09430bf0378f66d0927d68677980080cb07517a431051e27155c835d is running failed: container process not found" containerID="4e36fa3a09430bf0378f66d0927d68677980080cb07517a431051e27155c835d" cmd=["grpc_health_probe","-addr=:50051"] Dec 05 17:48:10 crc kubenswrapper[4961]: E1205 17:48:10.022654 4961 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 4e36fa3a09430bf0378f66d0927d68677980080cb07517a431051e27155c835d is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-marketplace-pqstj" podUID="4ebb57ac-af33-476b-afed-3f23988fe0da" containerName="registry-server" Dec 05 17:48:12 crc kubenswrapper[4961]: E1205 17:48:12.267277 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59" Dec 05 17:48:12 crc kubenswrapper[4961]: E1205 17:48:12.267652 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/ovn-operator@sha256:635a4aef9d6f0b799e8ec91333dbb312160c001d05b3c63f614c124e0b67cb59,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-xpdt5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ovn-operator-controller-manager-b6456fdb6-jghkw_openstack-operators(8fd2cc5d-67e4-4b9a-9d0c-58993491bc08): ErrImagePull: rpc error: code = 
Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:48:12 crc kubenswrapper[4961]: E1205 17:48:12.800313 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7" Dec 05 17:48:12 crc kubenswrapper[4961]: E1205 17:48:12.800502 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/mariadb-operator@sha256:600ca007e493d3af0fcc2ebac92e8da5efd2afe812b62d7d3d4dd0115bdf05d7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-8q6bj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod mariadb-operator-controller-manager-56bbcc9d85-4whjc_openstack-operators(f7757573-1085-4560-880c-3d9b36ce93f7): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:48:13 crc kubenswrapper[4961]: E1205 17:48:13.381820 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f" Dec 05 17:48:13 crc kubenswrapper[4961]: E1205 17:48:13.382121 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:manager,Image:quay.io/openstack-k8s-operators/placement-operator@sha256:d29650b006da97eb9178fcc58f2eb9fead8c2b414fac18f86a3c3a1507488c4f,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-btm9j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod placement-operator-controller-manager-78f8948974-zbcdb_openstack-operators(7ff3fbef-1490-4ce9-b350-03a0a7182b78): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:48:14 crc kubenswrapper[4961]: E1205 17:48:14.437968 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5" Dec 05 17:48:14 crc kubenswrapper[4961]: E1205 17:48:14.438150 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/horizon-operator@sha256:9e847f4dbdea19ab997f32a02b3680a9bd966f9c705911645c3866a19fda9ea5,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: 
{{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-4fndk,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-operator-controller-manager-68c6d99b8f-gd5nk_openstack-operators(af9a8d55-8cff-40e2-9f1b-bbd05c3eea80): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:48:14 crc kubenswrapper[4961]: I1205 17:48:14.464208 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pqstj" Dec 05 17:48:14 crc kubenswrapper[4961]: I1205 17:48:14.516671 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pqstj" event={"ID":"4ebb57ac-af33-476b-afed-3f23988fe0da","Type":"ContainerDied","Data":"3cdcd5c653e35e22cc04ea19278aaee1388b9b4851ea7c3879b7aa04a15ae958"} Dec 05 17:48:14 crc kubenswrapper[4961]: I1205 17:48:14.517126 4961 scope.go:117] "RemoveContainer" containerID="4e36fa3a09430bf0378f66d0927d68677980080cb07517a431051e27155c835d" Dec 05 17:48:14 crc kubenswrapper[4961]: I1205 17:48:14.516722 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pqstj" Dec 05 17:48:14 crc kubenswrapper[4961]: I1205 17:48:14.644450 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ebb57ac-af33-476b-afed-3f23988fe0da-utilities\") pod \"4ebb57ac-af33-476b-afed-3f23988fe0da\" (UID: \"4ebb57ac-af33-476b-afed-3f23988fe0da\") " Dec 05 17:48:14 crc kubenswrapper[4961]: I1205 17:48:14.644499 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ebb57ac-af33-476b-afed-3f23988fe0da-catalog-content\") pod \"4ebb57ac-af33-476b-afed-3f23988fe0da\" (UID: \"4ebb57ac-af33-476b-afed-3f23988fe0da\") " Dec 05 17:48:14 crc kubenswrapper[4961]: I1205 17:48:14.644562 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-blk5n\" (UniqueName: \"kubernetes.io/projected/4ebb57ac-af33-476b-afed-3f23988fe0da-kube-api-access-blk5n\") pod \"4ebb57ac-af33-476b-afed-3f23988fe0da\" (UID: \"4ebb57ac-af33-476b-afed-3f23988fe0da\") " Dec 05 17:48:14 crc kubenswrapper[4961]: I1205 17:48:14.645248 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ebb57ac-af33-476b-afed-3f23988fe0da-utilities" (OuterVolumeSpecName: "utilities") pod "4ebb57ac-af33-476b-afed-3f23988fe0da" (UID: "4ebb57ac-af33-476b-afed-3f23988fe0da"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:48:14 crc kubenswrapper[4961]: I1205 17:48:14.662040 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ebb57ac-af33-476b-afed-3f23988fe0da-kube-api-access-blk5n" (OuterVolumeSpecName: "kube-api-access-blk5n") pod "4ebb57ac-af33-476b-afed-3f23988fe0da" (UID: "4ebb57ac-af33-476b-afed-3f23988fe0da"). InnerVolumeSpecName "kube-api-access-blk5n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:48:14 crc kubenswrapper[4961]: I1205 17:48:14.664717 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ebb57ac-af33-476b-afed-3f23988fe0da-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4ebb57ac-af33-476b-afed-3f23988fe0da" (UID: "4ebb57ac-af33-476b-afed-3f23988fe0da"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:48:14 crc kubenswrapper[4961]: I1205 17:48:14.746450 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-blk5n\" (UniqueName: \"kubernetes.io/projected/4ebb57ac-af33-476b-afed-3f23988fe0da-kube-api-access-blk5n\") on node \"crc\" DevicePath \"\"" Dec 05 17:48:14 crc kubenswrapper[4961]: I1205 17:48:14.746484 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4ebb57ac-af33-476b-afed-3f23988fe0da-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:48:14 crc kubenswrapper[4961]: I1205 17:48:14.746494 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4ebb57ac-af33-476b-afed-3f23988fe0da-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:48:14 crc kubenswrapper[4961]: I1205 17:48:14.856010 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqstj"] Dec 05 17:48:14 crc kubenswrapper[4961]: I1205 17:48:14.878399 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pqstj"] Dec 05 17:48:14 crc kubenswrapper[4961]: E1205 17:48:14.999686 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7" Dec 05 17:48:14 crc kubenswrapper[4961]: E1205 17:48:14.999906 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:manager,Image:quay.io/openstack-k8s-operators/keystone-operator@sha256:72ad6517987f674af0d0ae092cbb874aeae909c8b8b60188099c311762ebc8f7,Command:[/manager],Args:[--leader-elect --health-probe-bind-address=:8081 --metrics-bind-address=127.0.0.1:8080],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:LEASE_DURATION,Value:30,ValueFrom:nil,},EnvVar{Name:RENEW_DEADLINE,Value:20,ValueFrom:nil,},EnvVar{Name:RETRY_PERIOD,Value:5,ValueFrom:nil,},EnvVar{Name:ENABLE_WEBHOOKS,Value:false,ValueFrom:nil,},EnvVar{Name:METRICS_CERTS,Value:false,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{500 -3} {} 500m DecimalSI},memory: {{536870912 0} {} BinarySI},},Requests:ResourceList{cpu: {{10 -3} {} 10m DecimalSI},memory: {{268435456 0} {} BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:kube-api-access-dg6jj,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/healthz,Port:{0 8081 },Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:15,TimeoutSeconds:1,PeriodSeconds:20,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:&HTTPGetAction{Path:/readyz,Port:{0 8081 
},Host:,Scheme:HTTP,HTTPHeaders:[]HTTPHeader{},},TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:1,PeriodSeconds:10,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-operator-controller-manager-7765d96ddf-bnjfr_openstack-operators(0499fdb9-20d5-445c-9ca0-4492287fbcc0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:48:16 crc kubenswrapper[4961]: I1205 17:48:16.873496 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ebb57ac-af33-476b-afed-3f23988fe0da" path="/var/lib/kubelet/pods/4ebb57ac-af33-476b-afed-3f23988fe0da/volumes" Dec 05 17:48:19 crc kubenswrapper[4961]: I1205 17:48:19.234282 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ccea0c2d-817c-4895-b8a7-bf852bd12aa9-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl\" (UID: \"ccea0c2d-817c-4895-b8a7-bf852bd12aa9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl" Dec 05 17:48:19 crc kubenswrapper[4961]: I1205 17:48:19.240696 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/ccea0c2d-817c-4895-b8a7-bf852bd12aa9-cert\") pod \"openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl\" (UID: \"ccea0c2d-817c-4895-b8a7-bf852bd12aa9\") " pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl" Dec 05 17:48:19 crc kubenswrapper[4961]: I1205 17:48:19.396937 4961 scope.go:117] "RemoveContainer" containerID="7b413fd9c9c5c4ed59e9804ba7d4193bd79f474913ef128184ea0b52fc6dd644" Dec 05 17:48:19 crc kubenswrapper[4961]: I1205 17:48:19.421875 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"openstack-baremetal-operator-controller-manager-dockercfg-ndxxg" Dec 05 17:48:19 crc kubenswrapper[4961]: I1205 17:48:19.428608 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl" Dec 05 17:48:19 crc kubenswrapper[4961]: I1205 17:48:19.763726 4961 scope.go:117] "RemoveContainer" containerID="b399b6b4d64b0dedeb462569c11e509de2a5b909472f44e857ef2cac58b89f94" Dec 05 17:48:19 crc kubenswrapper[4961]: I1205 17:48:19.971395 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jjckm"] Dec 05 17:48:19 crc kubenswrapper[4961]: I1205 17:48:19.977002 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn"] Dec 05 17:48:20 crc kubenswrapper[4961]: I1205 17:48:20.041025 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-84b9cfc694-lml7r"] Dec 05 17:48:20 crc kubenswrapper[4961]: W1205 17:48:20.329118 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb749bb70_6d63_47f6_8a9d_4b016baa1e49.slice/crio-684161f3117a3204663efb1d9c4a573f8571508ebfb6a553f313f1858b6e44e7 WatchSource:0}: Error finding container 684161f3117a3204663efb1d9c4a573f8571508ebfb6a553f313f1858b6e44e7: Status 404 returned error can't find the container with id 684161f3117a3204663efb1d9c4a573f8571508ebfb6a553f313f1858b6e44e7 Dec 05 17:48:20 crc kubenswrapper[4961]: W1205 17:48:20.332253 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod619cb5c4_1a5a_4eb8_ad2d_28615e0dc607.slice/crio-366a0a3248207c066e77cb426d49efb9cbaa4ebd75fe450957bc2862b6f3387a WatchSource:0}: Error finding container 366a0a3248207c066e77cb426d49efb9cbaa4ebd75fe450957bc2862b6f3387a: Status 404 returned error can't find the container with id 366a0a3248207c066e77cb426d49efb9cbaa4ebd75fe450957bc2862b6f3387a Dec 05 17:48:20 crc kubenswrapper[4961]: W1205 17:48:20.333481 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod77f7ec48_3abf_4934_a703_fa3f5edfbd27.slice/crio-40ffbc15f0e4dc9dd2a58c2dd6375ef3a0097c3e890518e930c5f1868a198d0b WatchSource:0}: Error finding container 40ffbc15f0e4dc9dd2a58c2dd6375ef3a0097c3e890518e930c5f1868a198d0b: Status 404 returned error can't find the container with id 40ffbc15f0e4dc9dd2a58c2dd6375ef3a0097c3e890518e930c5f1868a198d0b Dec 05 17:48:20 crc kubenswrapper[4961]: I1205 17:48:20.570556 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-84b9cfc694-lml7r" event={"ID":"77f7ec48-3abf-4934-a703-fa3f5edfbd27","Type":"ContainerStarted","Data":"40ffbc15f0e4dc9dd2a58c2dd6375ef3a0097c3e890518e930c5f1868a198d0b"} Dec 05 17:48:20 crc kubenswrapper[4961]: I1205 17:48:20.573796 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn" event={"ID":"619cb5c4-1a5a-4eb8-ad2d-28615e0dc607","Type":"ContainerStarted","Data":"366a0a3248207c066e77cb426d49efb9cbaa4ebd75fe450957bc2862b6f3387a"} Dec 05 17:48:20 crc kubenswrapper[4961]: I1205 17:48:20.575627 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jjckm" event={"ID":"b749bb70-6d63-47f6-8a9d-4b016baa1e49","Type":"ContainerStarted","Data":"684161f3117a3204663efb1d9c4a573f8571508ebfb6a553f313f1858b6e44e7"} Dec 05 17:48:21 crc kubenswrapper[4961]: I1205 
17:48:21.052499 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl"] Dec 05 17:48:21 crc kubenswrapper[4961]: I1205 17:48:21.605418 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-pszjn" event={"ID":"f7ff9bd4-8a05-4a50-b38b-701451107b9f","Type":"ContainerStarted","Data":"ea12413fbaf7d093610aab614ae3262afdf87a48f950182110ded09ce7834d93"} Dec 05 17:48:21 crc kubenswrapper[4961]: I1205 17:48:21.610162 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-2jz8d" event={"ID":"c567a803-253f-4895-a504-caee7ba37c34","Type":"ContainerStarted","Data":"b014e4440f0dc975f3291af1f45491c928cd7b775d43dc55e2925a7e9da8b28d"} Dec 05 17:48:21 crc kubenswrapper[4961]: I1205 17:48:21.611917 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-cnqj9" event={"ID":"705eb884-eb46-4d59-86ee-c2f1587d5df4","Type":"ContainerStarted","Data":"a6d9bf6e750415fadfc875b7594628c3306d46ad5eff6de3bc67dcaa599834ee"} Dec 05 17:48:21 crc kubenswrapper[4961]: I1205 17:48:21.615209 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-2b6j2" event={"ID":"a69e4847-13bc-4c1e-82a9-546fb11ad38d","Type":"ContainerStarted","Data":"12b31dd8b373bd904b6e5f1300922733c63ad88b20dd9e748002a1921f20afd9"} Dec 05 17:48:21 crc kubenswrapper[4961]: I1205 17:48:21.616507 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hgv2r" event={"ID":"e661480b-d6fd-4c16-9f03-f519092d05c6","Type":"ContainerStarted","Data":"ac86dce201a4a26fad87a886c7f134dda502d90f31b458807c7361a10d495472"} Dec 05 17:48:21 crc kubenswrapper[4961]: I1205 17:48:21.619049 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-95v4g" event={"ID":"4d42ce43-3c27-4007-a20b-e0068beb2490","Type":"ContainerStarted","Data":"b32eeb6b1406035f60d9bc20e1a9dd8bc46e7acf82570344b993289a8926661b"} Dec 05 17:48:21 crc kubenswrapper[4961]: I1205 17:48:21.620718 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl" event={"ID":"ccea0c2d-817c-4895-b8a7-bf852bd12aa9","Type":"ContainerStarted","Data":"fe01bdbcd997fbe1daa5acea8bc76c563d322aa4fba3b193a0eaa5b83efc5e67"} Dec 05 17:48:21 crc kubenswrapper[4961]: I1205 17:48:21.626537 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-dsmtk" event={"ID":"0770d71d-11ea-4b63-8a98-31521f395686","Type":"ContainerStarted","Data":"ea513536cd357bec5a4324b11e4d9c92f89185ccc4b27a8f6ffc220a39da79dc"} Dec 05 17:48:21 crc kubenswrapper[4961]: I1205 17:48:21.628675 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-dlwzz" event={"ID":"0b1fd140-fbf5-4d64-950b-b0bdcd07ec54","Type":"ContainerStarted","Data":"cb4fa088be7b0f56a513797731f297d8f6cee9ebf88c4bb475f25e1504fc57fc"} Dec 05 17:48:21 crc kubenswrapper[4961]: I1205 17:48:21.629979 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-qnmsr" 
event={"ID":"6a4cdfbf-8697-4f8f-9d07-b5aaa5e05991","Type":"ContainerStarted","Data":"3cec20993bc856cf7bb7bade0cc21f0bfcc1379eed8d34d62d8f1522c9c40228"} Dec 05 17:48:21 crc kubenswrapper[4961]: I1205 17:48:21.631430 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c8x5r" event={"ID":"767519b7-2067-4fca-a96b-bf9b02e1b273","Type":"ContainerStarted","Data":"9ea42b4f41097ffdba9e7e7dcd12f0cbd53c7d736e33af3e9b2e2251bcb1d977"} Dec 05 17:48:21 crc kubenswrapper[4961]: I1205 17:48:21.632961 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-q2p4h" event={"ID":"90df3fec-9bc9-48ca-a432-374c1f7e2002","Type":"ContainerStarted","Data":"414132c6411d9beed7e325ed8fcdc4eed4a9b0acccf6710cd8bc2eb498bd2bd1"} Dec 05 17:48:21 crc kubenswrapper[4961]: I1205 17:48:21.636071 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-4k7dd" event={"ID":"e27c6cc8-ef18-421e-8a7b-1b6bb2227724","Type":"ContainerStarted","Data":"64b0c8304e03b89f2a193015f2ba3784b3336f4294de73ce77b10f83e0c23bba"} Dec 05 17:48:22 crc kubenswrapper[4961]: I1205 17:48:22.650589 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn" event={"ID":"619cb5c4-1a5a-4eb8-ad2d-28615e0dc607","Type":"ContainerStarted","Data":"9748f982e2abc31acede50ed2d799a1bec2f6fe99c6491028a65c9d83be8ddf8"} Dec 05 17:48:22 crc kubenswrapper[4961]: I1205 17:48:22.652821 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn" Dec 05 17:48:22 crc kubenswrapper[4961]: I1205 17:48:22.655643 4961 generic.go:334] "Generic (PLEG): container finished" podID="b749bb70-6d63-47f6-8a9d-4b016baa1e49" containerID="0da5117f53960763bf37757f038e6c0270f954122b1933f9370a4f9401434eed" exitCode=0 Dec 05 17:48:22 crc kubenswrapper[4961]: I1205 17:48:22.655718 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jjckm" event={"ID":"b749bb70-6d63-47f6-8a9d-4b016baa1e49","Type":"ContainerDied","Data":"0da5117f53960763bf37757f038e6c0270f954122b1933f9370a4f9401434eed"} Dec 05 17:48:22 crc kubenswrapper[4961]: I1205 17:48:22.657273 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbp7" event={"ID":"39ba1343-9933-483d-aef2-90e0ceb14c79","Type":"ContainerStarted","Data":"7daf3d715bbcd83c1f0c93941ba3e5be2a4fd941e930bac741d4e5d572e5177d"} Dec 05 17:48:22 crc kubenswrapper[4961]: I1205 17:48:22.687439 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn" podStartSLOduration=35.687424199 podStartE2EDuration="35.687424199s" podCreationTimestamp="2025-12-05 17:47:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:48:22.682346126 +0000 UTC m=+908.743496619" watchObservedRunningTime="2025-12-05 17:48:22.687424199 +0000 UTC m=+908.748574672" Dec 05 17:48:22 crc kubenswrapper[4961]: I1205 17:48:22.706608 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-manager-668c99d594-4dbp7" podStartSLOduration=5.060121527 
podStartE2EDuration="35.706592549s" podCreationTimestamp="2025-12-05 17:47:47 +0000 UTC" firstStartedPulling="2025-12-05 17:47:49.046258325 +0000 UTC m=+875.107408798" lastFinishedPulling="2025-12-05 17:48:19.692729347 +0000 UTC m=+905.753879820" observedRunningTime="2025-12-05 17:48:22.701590706 +0000 UTC m=+908.762741179" watchObservedRunningTime="2025-12-05 17:48:22.706592549 +0000 UTC m=+908.767743022" Dec 05 17:48:26 crc kubenswrapper[4961]: E1205 17:48:26.048659 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/placement-operator-controller-manager-78f8948974-zbcdb" podUID="7ff3fbef-1490-4ce9-b350-03a0a7182b78" Dec 05 17:48:26 crc kubenswrapper[4961]: E1205 17:48:26.074282 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gd5nk" podUID="af9a8d55-8cff-40e2-9f1b-bbd05c3eea80" Dec 05 17:48:26 crc kubenswrapper[4961]: E1205 17:48:26.297014 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-zjbvv" podUID="2bb86b5f-1ee1-48c0-bcc1-60ca583c1339" Dec 05 17:48:26 crc kubenswrapper[4961]: E1205 17:48:26.307418 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jghkw" podUID="8fd2cc5d-67e4-4b9a-9d0c-58993491bc08" Dec 05 17:48:26 crc kubenswrapper[4961]: E1205 17:48:26.337841 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bnjfr" podUID="0499fdb9-20d5-445c-9ca0-4492287fbcc0" Dec 05 17:48:26 crc kubenswrapper[4961]: E1205 17:48:26.382655 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-nw6x8" podUID="406dca34-428b-493b-b564-511542c2bad6" Dec 05 17:48:26 crc kubenswrapper[4961]: E1205 17:48:26.695170 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-4whjc" podUID="f7757573-1085-4560-880c-3d9b36ce93f7" Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.700977 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl" event={"ID":"ccea0c2d-817c-4895-b8a7-bf852bd12aa9","Type":"ContainerStarted","Data":"2530d6f67541a71a88914b1e2a86dc8377622623eaf012637763b0ab9d1f8e74"} Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.709185 4961 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jghkw" event={"ID":"8fd2cc5d-67e4-4b9a-9d0c-58993491bc08","Type":"ContainerStarted","Data":"e073790a949221895e4bc56adf25085c30fed6dd1004736a8e06a89883c4dd08"} Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.712486 4961 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.721061 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-95v4g" event={"ID":"4d42ce43-3c27-4007-a20b-e0068beb2490","Type":"ContainerStarted","Data":"269f6fba894a7e4e2b697bf7b87919f9e0670872ef6fd795d6183cc97c992e84"} Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.722382 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-95v4g" Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.725912 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-95v4g" Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.728032 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-nw6x8" event={"ID":"406dca34-428b-493b-b564-511542c2bad6","Type":"ContainerStarted","Data":"d74406b786dc07d02b40ad1077cc586d3fe651a6ac725139badde7c528de680b"} Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.747052 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-4whjc" event={"ID":"f7757573-1085-4560-880c-3d9b36ce93f7","Type":"ContainerStarted","Data":"226afa61e98f264a3933539c0ffa90049d1405b3961b33f4667569fa68de0951"} Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.758384 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/heat-operator-controller-manager-5f64f6f8bb-95v4g" podStartSLOduration=3.17601406 podStartE2EDuration="40.758362522s" podCreationTimestamp="2025-12-05 17:47:46 +0000 UTC" firstStartedPulling="2025-12-05 17:47:48.372952781 +0000 UTC m=+874.434103254" lastFinishedPulling="2025-12-05 17:48:25.955301243 +0000 UTC m=+912.016451716" observedRunningTime="2025-12-05 17:48:26.75705747 +0000 UTC m=+912.818207973" watchObservedRunningTime="2025-12-05 17:48:26.758362522 +0000 UTC m=+912.819512995" Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.759005 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-zjbvv" event={"ID":"2bb86b5f-1ee1-48c0-bcc1-60ca583c1339","Type":"ContainerStarted","Data":"39344de9ec6370a754ab4b1a05b6556126b23ed67549acf00408cbba6604db6b"} Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.772433 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-q2p4h" event={"ID":"90df3fec-9bc9-48ca-a432-374c1f7e2002","Type":"ContainerStarted","Data":"5783c6a35127ce22bcae8e0698cc197579428cefec7720646c97ff61e1068bee"} Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.773410 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-q2p4h" Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.779413 4961 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-q2p4h" Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.809135 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-pszjn" event={"ID":"f7ff9bd4-8a05-4a50-b38b-701451107b9f","Type":"ContainerStarted","Data":"cac9c31e3c98a1d28927fe721943afeec155ad7303da1be958ecb8be012abb44"} Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.809614 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-pszjn" Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.811402 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-pszjn" Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.838351 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bnjfr" event={"ID":"0499fdb9-20d5-445c-9ca0-4492287fbcc0","Type":"ContainerStarted","Data":"20242480bd25a663c78d9ca1339885d5e3a46c26ce08c1bd8ce167f759ec4d46"} Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.847140 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-2jz8d" event={"ID":"c567a803-253f-4895-a504-caee7ba37c34","Type":"ContainerStarted","Data":"1e04c7ca26fd1f4fc429ed780b898fd6755a2dbd0fd7d4025910eaac28a429bf"} Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.848635 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-2jz8d" Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.854286 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-7c79b5df47-q2p4h" podStartSLOduration=3.747087236 podStartE2EDuration="40.85427561s" podCreationTimestamp="2025-12-05 17:47:46 +0000 UTC" firstStartedPulling="2025-12-05 17:47:48.565410395 +0000 UTC m=+874.626560868" lastFinishedPulling="2025-12-05 17:48:25.672598769 +0000 UTC m=+911.733749242" observedRunningTime="2025-12-05 17:48:26.839601861 +0000 UTC m=+912.900752334" watchObservedRunningTime="2025-12-05 17:48:26.85427561 +0000 UTC m=+912.915426083" Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.857437 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-2jz8d" Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.857993 4961 generic.go:334] "Generic (PLEG): container finished" podID="b749bb70-6d63-47f6-8a9d-4b016baa1e49" containerID="00833040fa8d9ee65829a213b1f6de590539988780e863f6daa9581c1a0067e2" exitCode=0 Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.858056 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jjckm" event={"ID":"b749bb70-6d63-47f6-8a9d-4b016baa1e49","Type":"ContainerDied","Data":"00833040fa8d9ee65829a213b1f6de590539988780e863f6daa9581c1a0067e2"} Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.914484 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hgv2r" 
event={"ID":"e661480b-d6fd-4c16-9f03-f519092d05c6","Type":"ContainerStarted","Data":"7af74d7793b260de86e6aa6dabaae8cbcd801373f49aca1852812361c1c2f21b"} Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.914536 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hgv2r" Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.919080 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hgv2r" Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.924103 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gd5nk" event={"ID":"af9a8d55-8cff-40e2-9f1b-bbd05c3eea80","Type":"ContainerStarted","Data":"7cf53cb53689648a6382176064cfb0a843593254d2c31dc583cb5980e5bbced7"} Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.942957 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-84b9cfc694-lml7r" event={"ID":"77f7ec48-3abf-4934-a703-fa3f5edfbd27","Type":"ContainerStarted","Data":"622e88a83cc9c643e500f475adda252fe9ac052307203e1fb9b6d689f4767020"} Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.944802 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/neutron-operator-controller-manager-5fdfd5b6b5-pszjn" podStartSLOduration=4.236300181 podStartE2EDuration="40.944786347s" podCreationTimestamp="2025-12-05 17:47:46 +0000 UTC" firstStartedPulling="2025-12-05 17:47:48.988473021 +0000 UTC m=+875.049623504" lastFinishedPulling="2025-12-05 17:48:25.696959177 +0000 UTC m=+911.758109670" observedRunningTime="2025-12-05 17:48:26.939144559 +0000 UTC m=+913.000295032" watchObservedRunningTime="2025-12-05 17:48:26.944786347 +0000 UTC m=+913.005936820" Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.955338 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-zbcdb" event={"ID":"7ff3fbef-1490-4ce9-b350-03a0a7182b78","Type":"ContainerStarted","Data":"ca6f5d2db75863344153cf7ad8b589750a6f43fac806dd935991f52aa6f7bf6b"} Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.967683 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/octavia-operator-controller-manager-998648c74-2b6j2" event={"ID":"a69e4847-13bc-4c1e-82a9-546fb11ad38d","Type":"ContainerStarted","Data":"0be7aac72d7d59b025a428dd39b7f8313cbd1fb7f4af8ac007325f0c87d94b39"} Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.969482 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/octavia-operator-controller-manager-998648c74-2b6j2" Dec 05 17:48:26 crc kubenswrapper[4961]: I1205 17:48:26.970259 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/octavia-operator-controller-manager-998648c74-2b6j2" Dec 05 17:48:27 crc kubenswrapper[4961]: I1205 17:48:27.004707 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/watcher-operator-controller-manager-769dc69bc-2jz8d" podStartSLOduration=3.049245084 podStartE2EDuration="40.004687974s" podCreationTimestamp="2025-12-05 17:47:47 +0000 UTC" firstStartedPulling="2025-12-05 17:47:48.753415548 +0000 UTC m=+874.814566021" lastFinishedPulling="2025-12-05 17:48:25.708858438 +0000 UTC m=+911.770008911" 
observedRunningTime="2025-12-05 17:48:27.003411913 +0000 UTC m=+913.064562396" watchObservedRunningTime="2025-12-05 17:48:27.004687974 +0000 UTC m=+913.065838447" Dec 05 17:48:27 crc kubenswrapper[4961]: I1205 17:48:27.046302 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/nova-operator-controller-manager-697bc559fc-hgv2r" podStartSLOduration=4.582871659 podStartE2EDuration="41.046282592s" podCreationTimestamp="2025-12-05 17:47:46 +0000 UTC" firstStartedPulling="2025-12-05 17:47:49.048948742 +0000 UTC m=+875.110099215" lastFinishedPulling="2025-12-05 17:48:25.512359675 +0000 UTC m=+911.573510148" observedRunningTime="2025-12-05 17:48:27.036464822 +0000 UTC m=+913.097615295" watchObservedRunningTime="2025-12-05 17:48:27.046282592 +0000 UTC m=+913.107433065" Dec 05 17:48:27 crc kubenswrapper[4961]: I1205 17:48:27.139441 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/octavia-operator-controller-manager-998648c74-2b6j2" podStartSLOduration=4.463183784 podStartE2EDuration="41.139418594s" podCreationTimestamp="2025-12-05 17:47:46 +0000 UTC" firstStartedPulling="2025-12-05 17:47:49.028132588 +0000 UTC m=+875.089283061" lastFinishedPulling="2025-12-05 17:48:25.704367388 +0000 UTC m=+911.765517871" observedRunningTime="2025-12-05 17:48:27.109737847 +0000 UTC m=+913.170888340" watchObservedRunningTime="2025-12-05 17:48:27.139418594 +0000 UTC m=+913.200569067" Dec 05 17:48:28 crc kubenswrapper[4961]: I1205 17:48:28.001147 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/test-operator-controller-manager-5854674fcc-4k7dd" event={"ID":"e27c6cc8-ef18-421e-8a7b-1b6bb2227724","Type":"ContainerStarted","Data":"53643c1e8f887463cfb335bd1f9f782d59d564dc99ee16945d34ba0e25580ff2"} Dec 05 17:48:28 crc kubenswrapper[4961]: I1205 17:48:28.002664 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/test-operator-controller-manager-5854674fcc-4k7dd" Dec 05 17:48:28 crc kubenswrapper[4961]: I1205 17:48:28.007739 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/test-operator-controller-manager-5854674fcc-4k7dd" Dec 05 17:48:28 crc kubenswrapper[4961]: I1205 17:48:28.047321 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/test-operator-controller-manager-5854674fcc-4k7dd" podStartSLOduration=5.096552679 podStartE2EDuration="42.047305709s" podCreationTimestamp="2025-12-05 17:47:46 +0000 UTC" firstStartedPulling="2025-12-05 17:47:49.035588113 +0000 UTC m=+875.096738586" lastFinishedPulling="2025-12-05 17:48:25.986341143 +0000 UTC m=+912.047491616" observedRunningTime="2025-12-05 17:48:28.044208133 +0000 UTC m=+914.105358606" watchObservedRunningTime="2025-12-05 17:48:28.047305709 +0000 UTC m=+914.108456182" Dec 05 17:48:28 crc kubenswrapper[4961]: I1205 17:48:28.050406 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-dsmtk" event={"ID":"0770d71d-11ea-4b63-8a98-31521f395686","Type":"ContainerStarted","Data":"5ef688787e989c11769342cf47bbe473bfff1f3bcb48afd53298f9b2538df1a1"} Dec 05 17:48:28 crc kubenswrapper[4961]: I1205 17:48:28.050851 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-dsmtk" Dec 05 17:48:28 crc kubenswrapper[4961]: I1205 17:48:28.052929 4961 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-dsmtk" Dec 05 17:48:28 crc kubenswrapper[4961]: I1205 17:48:28.053376 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-84b9cfc694-lml7r" event={"ID":"77f7ec48-3abf-4934-a703-fa3f5edfbd27","Type":"ContainerStarted","Data":"ec8d53ef341ea4f09a090c110a311657f1f53fc374166125e652c143543ecf4e"} Dec 05 17:48:28 crc kubenswrapper[4961]: I1205 17:48:28.053808 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-84b9cfc694-lml7r" Dec 05 17:48:28 crc kubenswrapper[4961]: I1205 17:48:28.055725 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-dlwzz" Dec 05 17:48:28 crc kubenswrapper[4961]: I1205 17:48:28.057017 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-cnqj9" event={"ID":"705eb884-eb46-4d59-86ee-c2f1587d5df4","Type":"ContainerStarted","Data":"fb8fbe7ffda46c30a0066c72b81ca7f566ec543f48a95559e930834cda0d45ea"} Dec 05 17:48:28 crc kubenswrapper[4961]: I1205 17:48:28.057406 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-cnqj9" Dec 05 17:48:28 crc kubenswrapper[4961]: I1205 17:48:28.057528 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-dlwzz" Dec 05 17:48:28 crc kubenswrapper[4961]: I1205 17:48:28.058997 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-cnqj9" Dec 05 17:48:28 crc kubenswrapper[4961]: I1205 17:48:28.061176 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-qnmsr" Dec 05 17:48:28 crc kubenswrapper[4961]: I1205 17:48:28.062942 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-qnmsr" Dec 05 17:48:28 crc kubenswrapper[4961]: I1205 17:48:28.064979 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c8x5r" Dec 05 17:48:28 crc kubenswrapper[4961]: I1205 17:48:28.229245 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c8x5r" Dec 05 17:48:28 crc kubenswrapper[4961]: I1205 17:48:28.284839 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-84b9cfc694-lml7r" podStartSLOduration=37.126219806 podStartE2EDuration="42.284815726s" podCreationTimestamp="2025-12-05 17:47:46 +0000 UTC" firstStartedPulling="2025-12-05 17:48:20.340914121 +0000 UTC m=+906.402064594" lastFinishedPulling="2025-12-05 17:48:25.499510041 +0000 UTC m=+911.560660514" observedRunningTime="2025-12-05 17:48:28.26045039 +0000 UTC m=+914.321600863" watchObservedRunningTime="2025-12-05 17:48:28.284815726 +0000 UTC m=+914.345966219" Dec 05 17:48:28 crc kubenswrapper[4961]: I1205 17:48:28.333346 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c8x5r" podStartSLOduration=5.671882579 podStartE2EDuration="42.333326564s" podCreationTimestamp="2025-12-05 17:47:46 +0000 UTC" firstStartedPulling="2025-12-05 17:47:49.040569915 +0000 UTC m=+875.101720388" lastFinishedPulling="2025-12-05 17:48:25.7020139 +0000 UTC m=+911.763164373" observedRunningTime="2025-12-05 17:48:28.302883098 +0000 UTC m=+914.364033581" watchObservedRunningTime="2025-12-05 17:48:28.333326564 +0000 UTC m=+914.394477037" Dec 05 17:48:28 crc kubenswrapper[4961]: I1205 17:48:28.335174 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/telemetry-operator-controller-manager-76cc84c6bb-dsmtk" podStartSLOduration=5.444793178 podStartE2EDuration="42.335163849s" podCreationTimestamp="2025-12-05 17:47:46 +0000 UTC" firstStartedPulling="2025-12-05 17:47:49.048807848 +0000 UTC m=+875.109958321" lastFinishedPulling="2025-12-05 17:48:25.939178519 +0000 UTC m=+912.000328992" observedRunningTime="2025-12-05 17:48:28.322591611 +0000 UTC m=+914.383742104" watchObservedRunningTime="2025-12-05 17:48:28.335163849 +0000 UTC m=+914.396314322" Dec 05 17:48:28 crc kubenswrapper[4961]: I1205 17:48:28.343965 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-dlwzz" podStartSLOduration=5.078999198 podStartE2EDuration="42.343949594s" podCreationTimestamp="2025-12-05 17:47:46 +0000 UTC" firstStartedPulling="2025-12-05 17:47:48.583249995 +0000 UTC m=+874.644400468" lastFinishedPulling="2025-12-05 17:48:25.848200391 +0000 UTC m=+911.909350864" observedRunningTime="2025-12-05 17:48:28.337730922 +0000 UTC m=+914.398881395" watchObservedRunningTime="2025-12-05 17:48:28.343949594 +0000 UTC m=+914.405100067" Dec 05 17:48:28 crc kubenswrapper[4961]: I1205 17:48:28.365206 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/barbican-operator-controller-manager-7d9dfd778-cnqj9" podStartSLOduration=4.58802652 podStartE2EDuration="42.365181494s" podCreationTimestamp="2025-12-05 17:47:46 +0000 UTC" firstStartedPulling="2025-12-05 17:47:47.847048941 +0000 UTC m=+873.908199414" lastFinishedPulling="2025-12-05 17:48:25.624203915 +0000 UTC m=+911.685354388" observedRunningTime="2025-12-05 17:48:28.361648258 +0000 UTC m=+914.422798731" watchObservedRunningTime="2025-12-05 17:48:28.365181494 +0000 UTC m=+914.426331967" Dec 05 17:48:28 crc kubenswrapper[4961]: I1205 17:48:28.393940 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-qnmsr" podStartSLOduration=4.87653943 podStartE2EDuration="42.393923148s" podCreationTimestamp="2025-12-05 17:47:46 +0000 UTC" firstStartedPulling="2025-12-05 17:47:48.396383989 +0000 UTC m=+874.457534462" lastFinishedPulling="2025-12-05 17:48:25.913767707 +0000 UTC m=+911.974918180" observedRunningTime="2025-12-05 17:48:28.392176936 +0000 UTC m=+914.453327419" watchObservedRunningTime="2025-12-05 17:48:28.393923148 +0000 UTC m=+914.455073621" Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 17:48:29.085855 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-4whjc" event={"ID":"f7757573-1085-4560-880c-3d9b36ce93f7","Type":"ContainerStarted","Data":"a15dea224ba2b323776081bee5b9bb1a651a416c5df72b6e6f1a0113516f052d"} Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 
17:48:29.086662 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-4whjc" Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 17:48:29.088527 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl" event={"ID":"ccea0c2d-817c-4895-b8a7-bf852bd12aa9","Type":"ContainerStarted","Data":"89132cc4ece56b012183a1200585657b41e6c70e48b1bfae9832deb8d45e3164"} Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 17:48:29.088944 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl" Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 17:48:29.098350 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/glance-operator-controller-manager-77987cd8cd-dlwzz" event={"ID":"0b1fd140-fbf5-4d64-950b-b0bdcd07ec54","Type":"ContainerStarted","Data":"3330d2b7a5d0778b57c6941cc2eddb6e9fed761e716fa0bb4d6c317f3ebf57dd"} Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 17:48:29.104928 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/placement-operator-controller-manager-78f8948974-zbcdb" event={"ID":"7ff3fbef-1490-4ce9-b350-03a0a7182b78","Type":"ContainerStarted","Data":"a626772f37de1f5765d5ad64c472375c7b72a54982c659f4c188b536a24d48a4"} Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 17:48:29.105067 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/placement-operator-controller-manager-78f8948974-zbcdb" Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 17:48:29.110612 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-nw6x8" event={"ID":"406dca34-428b-493b-b564-511542c2bad6","Type":"ContainerStarted","Data":"0b77faa7eed3625f338681b5f38c5e4a7c057370626a93d16e943f4edf5e64b9"} Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 17:48:29.111442 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-nw6x8" Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 17:48:29.118945 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-4whjc" podStartSLOduration=3.521275007 podStartE2EDuration="43.118933575s" podCreationTimestamp="2025-12-05 17:47:46 +0000 UTC" firstStartedPulling="2025-12-05 17:47:48.573676859 +0000 UTC m=+874.634827332" lastFinishedPulling="2025-12-05 17:48:28.171335427 +0000 UTC m=+914.232485900" observedRunningTime="2025-12-05 17:48:29.112465776 +0000 UTC m=+915.173616249" watchObservedRunningTime="2025-12-05 17:48:29.118933575 +0000 UTC m=+915.180084048" Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 17:48:29.128210 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gd5nk" event={"ID":"af9a8d55-8cff-40e2-9f1b-bbd05c3eea80","Type":"ContainerStarted","Data":"139bc6a6891bb56139bfc0c243d20f21cc1b13b41cf1e66c0d8d1d6e7733895a"} Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 17:48:29.128874 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gd5nk" Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 17:48:29.137386 4961 pod_startup_latency_tracker.go:104] "Observed pod 
startup duration" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-nw6x8" podStartSLOduration=3.429345611 podStartE2EDuration="43.137372936s" podCreationTimestamp="2025-12-05 17:47:46 +0000 UTC" firstStartedPulling="2025-12-05 17:47:48.419816517 +0000 UTC m=+874.480966990" lastFinishedPulling="2025-12-05 17:48:28.127843852 +0000 UTC m=+914.188994315" observedRunningTime="2025-12-05 17:48:29.134239779 +0000 UTC m=+915.195390252" watchObservedRunningTime="2025-12-05 17:48:29.137372936 +0000 UTC m=+915.198523399" Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 17:48:29.141093 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-zjbvv" event={"ID":"2bb86b5f-1ee1-48c0-bcc1-60ca583c1339","Type":"ContainerStarted","Data":"d0f0ff0368cf102853e78882960935c97698346f4a68eff412141fc2d084e084"} Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 17:48:29.141692 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-zjbvv" Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 17:48:29.146947 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bnjfr" event={"ID":"0499fdb9-20d5-445c-9ca0-4492287fbcc0","Type":"ContainerStarted","Data":"a1a4c46e5d2730b5815c0a0e500cc0f6bfbc37ece1cc74d1ad8e6c56a5386120"} Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 17:48:29.147565 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bnjfr" Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 17:48:29.158602 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/designate-operator-controller-manager-78b4bc895b-qnmsr" event={"ID":"6a4cdfbf-8697-4f8f-9d07-b5aaa5e05991","Type":"ContainerStarted","Data":"8ed8b8b2f39794693c7a363df475f756fbd6709b59e2e27134f56a80fd72db49"} Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 17:48:29.162072 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jghkw" event={"ID":"8fd2cc5d-67e4-4b9a-9d0c-58993491bc08","Type":"ContainerStarted","Data":"c8716786d575aaa5bd9ddcf915d25cf06f05c0b05922221c09d0eb72c7542308"} Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 17:48:29.162667 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jghkw" Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 17:48:29.172742 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl" podStartSLOduration=38.841188596 podStartE2EDuration="43.172723101s" podCreationTimestamp="2025-12-05 17:47:46 +0000 UTC" firstStartedPulling="2025-12-05 17:48:21.111498212 +0000 UTC m=+907.172648685" lastFinishedPulling="2025-12-05 17:48:25.443032727 +0000 UTC m=+911.504183190" observedRunningTime="2025-12-05 17:48:29.163050264 +0000 UTC m=+915.224200737" watchObservedRunningTime="2025-12-05 17:48:29.172723101 +0000 UTC m=+915.233873574" Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 17:48:29.180161 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/swift-operator-controller-manager-5f8c65bbfc-c8x5r" 
event={"ID":"767519b7-2067-4fca-a96b-bf9b02e1b273","Type":"ContainerStarted","Data":"a73b3ab93eb6824f616e11b2a228026f7a38d63e1bfc8516a3f4bc11e8aca938"} Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 17:48:29.184471 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/placement-operator-controller-manager-78f8948974-zbcdb" podStartSLOduration=3.9034067329999997 podStartE2EDuration="43.184455699s" podCreationTimestamp="2025-12-05 17:47:46 +0000 UTC" firstStartedPulling="2025-12-05 17:47:49.01357555 +0000 UTC m=+875.074726023" lastFinishedPulling="2025-12-05 17:48:28.294624516 +0000 UTC m=+914.355774989" observedRunningTime="2025-12-05 17:48:29.181473536 +0000 UTC m=+915.242624009" watchObservedRunningTime="2025-12-05 17:48:29.184455699 +0000 UTC m=+915.245606162" Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 17:48:29.212890 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jghkw" podStartSLOduration=3.9740640320000002 podStartE2EDuration="43.212873195s" podCreationTimestamp="2025-12-05 17:47:46 +0000 UTC" firstStartedPulling="2025-12-05 17:47:49.008154756 +0000 UTC m=+875.069305239" lastFinishedPulling="2025-12-05 17:48:28.246963929 +0000 UTC m=+914.308114402" observedRunningTime="2025-12-05 17:48:29.20981187 +0000 UTC m=+915.270962353" watchObservedRunningTime="2025-12-05 17:48:29.212873195 +0000 UTC m=+915.274023668" Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 17:48:29.241421 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-zjbvv" podStartSLOduration=3.8687356040000003 podStartE2EDuration="43.241408104s" podCreationTimestamp="2025-12-05 17:47:46 +0000 UTC" firstStartedPulling="2025-12-05 17:47:48.950237709 +0000 UTC m=+875.011388182" lastFinishedPulling="2025-12-05 17:48:28.322910209 +0000 UTC m=+914.384060682" observedRunningTime="2025-12-05 17:48:29.240612505 +0000 UTC m=+915.301762978" watchObservedRunningTime="2025-12-05 17:48:29.241408104 +0000 UTC m=+915.302558577" Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 17:48:29.258954 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bnjfr" podStartSLOduration=4.069275721 podStartE2EDuration="43.258933663s" podCreationTimestamp="2025-12-05 17:47:46 +0000 UTC" firstStartedPulling="2025-12-05 17:47:48.994455518 +0000 UTC m=+875.055605991" lastFinishedPulling="2025-12-05 17:48:28.18411346 +0000 UTC m=+914.245263933" observedRunningTime="2025-12-05 17:48:29.256431832 +0000 UTC m=+915.317582315" watchObservedRunningTime="2025-12-05 17:48:29.258933663 +0000 UTC m=+915.320084136" Dec 05 17:48:29 crc kubenswrapper[4961]: I1205 17:48:29.274444 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gd5nk" podStartSLOduration=3.503984382 podStartE2EDuration="43.274428183s" podCreationTimestamp="2025-12-05 17:47:46 +0000 UTC" firstStartedPulling="2025-12-05 17:47:48.55666154 +0000 UTC m=+874.617812023" lastFinishedPulling="2025-12-05 17:48:28.327105351 +0000 UTC m=+914.388255824" observedRunningTime="2025-12-05 17:48:29.27105603 +0000 UTC m=+915.332206513" watchObservedRunningTime="2025-12-05 17:48:29.274428183 +0000 UTC m=+915.335578656" Dec 05 17:48:30 crc kubenswrapper[4961]: I1205 17:48:30.193382 4961 kubelet.go:2453] "SyncLoop (PLEG): 
event for pod" pod="openshift-marketplace/certified-operators-jjckm" event={"ID":"b749bb70-6d63-47f6-8a9d-4b016baa1e49","Type":"ContainerStarted","Data":"2c80cb4c5d4eb8c5d7a405f7f8c20f0016f60adf3ed2b7f0517078f675606953"} Dec 05 17:48:30 crc kubenswrapper[4961]: I1205 17:48:30.206845 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl" Dec 05 17:48:30 crc kubenswrapper[4961]: I1205 17:48:30.309985 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jjckm" podStartSLOduration=22.185410518 podStartE2EDuration="27.309967904s" podCreationTimestamp="2025-12-05 17:48:03 +0000 UTC" firstStartedPulling="2025-12-05 17:48:23.866354323 +0000 UTC m=+909.927504796" lastFinishedPulling="2025-12-05 17:48:28.990911709 +0000 UTC m=+915.052062182" observedRunningTime="2025-12-05 17:48:30.308224631 +0000 UTC m=+916.369375124" watchObservedRunningTime="2025-12-05 17:48:30.309967904 +0000 UTC m=+916.371118377" Dec 05 17:48:32 crc kubenswrapper[4961]: I1205 17:48:32.667961 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-84b9cfc694-lml7r" Dec 05 17:48:33 crc kubenswrapper[4961]: I1205 17:48:33.524469 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jjckm" Dec 05 17:48:33 crc kubenswrapper[4961]: I1205 17:48:33.525078 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jjckm" Dec 05 17:48:33 crc kubenswrapper[4961]: I1205 17:48:33.537950 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/openstack-operator-controller-manager-777bfdfd44-xwwmn" Dec 05 17:48:33 crc kubenswrapper[4961]: I1205 17:48:33.590251 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jjckm" Dec 05 17:48:34 crc kubenswrapper[4961]: I1205 17:48:34.273470 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jjckm" Dec 05 17:48:34 crc kubenswrapper[4961]: I1205 17:48:34.384788 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jjckm"] Dec 05 17:48:36 crc kubenswrapper[4961]: I1205 17:48:36.237193 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-jjckm" podUID="b749bb70-6d63-47f6-8a9d-4b016baa1e49" containerName="registry-server" containerID="cri-o://2c80cb4c5d4eb8c5d7a405f7f8c20f0016f60adf3ed2b7f0517078f675606953" gracePeriod=2 Dec 05 17:48:36 crc kubenswrapper[4961]: I1205 17:48:36.644972 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/cinder-operator-controller-manager-859b6ccc6-nw6x8" Dec 05 17:48:36 crc kubenswrapper[4961]: I1205 17:48:36.807047 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vctlq"] Dec 05 17:48:36 crc kubenswrapper[4961]: E1205 17:48:36.807438 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ebb57ac-af33-476b-afed-3f23988fe0da" containerName="registry-server" Dec 05 17:48:36 crc kubenswrapper[4961]: I1205 17:48:36.807462 4961 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="4ebb57ac-af33-476b-afed-3f23988fe0da" containerName="registry-server" Dec 05 17:48:36 crc kubenswrapper[4961]: E1205 17:48:36.807512 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ebb57ac-af33-476b-afed-3f23988fe0da" containerName="extract-utilities" Dec 05 17:48:36 crc kubenswrapper[4961]: I1205 17:48:36.807523 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ebb57ac-af33-476b-afed-3f23988fe0da" containerName="extract-utilities" Dec 05 17:48:36 crc kubenswrapper[4961]: E1205 17:48:36.807559 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ebb57ac-af33-476b-afed-3f23988fe0da" containerName="extract-content" Dec 05 17:48:36 crc kubenswrapper[4961]: I1205 17:48:36.807568 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ebb57ac-af33-476b-afed-3f23988fe0da" containerName="extract-content" Dec 05 17:48:36 crc kubenswrapper[4961]: I1205 17:48:36.807746 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="4ebb57ac-af33-476b-afed-3f23988fe0da" containerName="registry-server" Dec 05 17:48:36 crc kubenswrapper[4961]: I1205 17:48:36.809034 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vctlq" Dec 05 17:48:36 crc kubenswrapper[4961]: I1205 17:48:36.812887 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vctlq"] Dec 05 17:48:36 crc kubenswrapper[4961]: I1205 17:48:36.902579 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/horizon-operator-controller-manager-68c6d99b8f-gd5nk" Dec 05 17:48:36 crc kubenswrapper[4961]: I1205 17:48:36.973463 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c6dad26-ad88-41f9-b656-86d08062a614-catalog-content\") pod \"redhat-operators-vctlq\" (UID: \"9c6dad26-ad88-41f9-b656-86d08062a614\") " pod="openshift-marketplace/redhat-operators-vctlq" Dec 05 17:48:36 crc kubenswrapper[4961]: I1205 17:48:36.973610 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c6dad26-ad88-41f9-b656-86d08062a614-utilities\") pod \"redhat-operators-vctlq\" (UID: \"9c6dad26-ad88-41f9-b656-86d08062a614\") " pod="openshift-marketplace/redhat-operators-vctlq" Dec 05 17:48:36 crc kubenswrapper[4961]: I1205 17:48:36.973658 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xz2rl\" (UniqueName: \"kubernetes.io/projected/9c6dad26-ad88-41f9-b656-86d08062a614-kube-api-access-xz2rl\") pod \"redhat-operators-vctlq\" (UID: \"9c6dad26-ad88-41f9-b656-86d08062a614\") " pod="openshift-marketplace/redhat-operators-vctlq" Dec 05 17:48:37 crc kubenswrapper[4961]: I1205 17:48:37.075471 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c6dad26-ad88-41f9-b656-86d08062a614-catalog-content\") pod \"redhat-operators-vctlq\" (UID: \"9c6dad26-ad88-41f9-b656-86d08062a614\") " pod="openshift-marketplace/redhat-operators-vctlq" Dec 05 17:48:37 crc kubenswrapper[4961]: I1205 17:48:37.075574 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c6dad26-ad88-41f9-b656-86d08062a614-utilities\") pod \"redhat-operators-vctlq\" 
(UID: \"9c6dad26-ad88-41f9-b656-86d08062a614\") " pod="openshift-marketplace/redhat-operators-vctlq" Dec 05 17:48:37 crc kubenswrapper[4961]: I1205 17:48:37.075604 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xz2rl\" (UniqueName: \"kubernetes.io/projected/9c6dad26-ad88-41f9-b656-86d08062a614-kube-api-access-xz2rl\") pod \"redhat-operators-vctlq\" (UID: \"9c6dad26-ad88-41f9-b656-86d08062a614\") " pod="openshift-marketplace/redhat-operators-vctlq" Dec 05 17:48:37 crc kubenswrapper[4961]: I1205 17:48:37.076114 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c6dad26-ad88-41f9-b656-86d08062a614-catalog-content\") pod \"redhat-operators-vctlq\" (UID: \"9c6dad26-ad88-41f9-b656-86d08062a614\") " pod="openshift-marketplace/redhat-operators-vctlq" Dec 05 17:48:37 crc kubenswrapper[4961]: I1205 17:48:37.076169 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c6dad26-ad88-41f9-b656-86d08062a614-utilities\") pod \"redhat-operators-vctlq\" (UID: \"9c6dad26-ad88-41f9-b656-86d08062a614\") " pod="openshift-marketplace/redhat-operators-vctlq" Dec 05 17:48:37 crc kubenswrapper[4961]: I1205 17:48:37.097055 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xz2rl\" (UniqueName: \"kubernetes.io/projected/9c6dad26-ad88-41f9-b656-86d08062a614-kube-api-access-xz2rl\") pod \"redhat-operators-vctlq\" (UID: \"9c6dad26-ad88-41f9-b656-86d08062a614\") " pod="openshift-marketplace/redhat-operators-vctlq" Dec 05 17:48:37 crc kubenswrapper[4961]: I1205 17:48:37.128494 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vctlq" Dec 05 17:48:37 crc kubenswrapper[4961]: I1205 17:48:37.374701 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-56bbcc9d85-4whjc" Dec 05 17:48:37 crc kubenswrapper[4961]: I1205 17:48:37.379441 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-7765d96ddf-bnjfr" Dec 05 17:48:37 crc kubenswrapper[4961]: I1205 17:48:37.422401 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ironic-operator-controller-manager-6c548fd776-zjbvv" Dec 05 17:48:37 crc kubenswrapper[4961]: I1205 17:48:37.609641 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/ovn-operator-controller-manager-b6456fdb6-jghkw" Dec 05 17:48:37 crc kubenswrapper[4961]: I1205 17:48:37.610088 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vctlq"] Dec 05 17:48:37 crc kubenswrapper[4961]: W1205 17:48:37.615756 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9c6dad26_ad88_41f9_b656_86d08062a614.slice/crio-43f325fca948fe99dbbc802b4194bfa4a1f27e72a84b1ef224e5a974e523b9a9 WatchSource:0}: Error finding container 43f325fca948fe99dbbc802b4194bfa4a1f27e72a84b1ef224e5a974e523b9a9: Status 404 returned error can't find the container with id 43f325fca948fe99dbbc802b4194bfa4a1f27e72a84b1ef224e5a974e523b9a9 Dec 05 17:48:37 crc kubenswrapper[4961]: I1205 17:48:37.694768 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/placement-operator-controller-manager-78f8948974-zbcdb" Dec 05 17:48:38 crc kubenswrapper[4961]: I1205 17:48:38.266097 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vctlq" event={"ID":"9c6dad26-ad88-41f9-b656-86d08062a614","Type":"ContainerStarted","Data":"43f325fca948fe99dbbc802b4194bfa4a1f27e72a84b1ef224e5a974e523b9a9"} Dec 05 17:48:39 crc kubenswrapper[4961]: I1205 17:48:39.288486 4961 generic.go:334] "Generic (PLEG): container finished" podID="b749bb70-6d63-47f6-8a9d-4b016baa1e49" containerID="2c80cb4c5d4eb8c5d7a405f7f8c20f0016f60adf3ed2b7f0517078f675606953" exitCode=0 Dec 05 17:48:39 crc kubenswrapper[4961]: I1205 17:48:39.288584 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jjckm" event={"ID":"b749bb70-6d63-47f6-8a9d-4b016baa1e49","Type":"ContainerDied","Data":"2c80cb4c5d4eb8c5d7a405f7f8c20f0016f60adf3ed2b7f0517078f675606953"} Dec 05 17:48:40 crc kubenswrapper[4961]: I1205 17:48:40.191714 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jjckm" Dec 05 17:48:40 crc kubenswrapper[4961]: I1205 17:48:40.221243 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b749bb70-6d63-47f6-8a9d-4b016baa1e49-utilities\") pod \"b749bb70-6d63-47f6-8a9d-4b016baa1e49\" (UID: \"b749bb70-6d63-47f6-8a9d-4b016baa1e49\") " Dec 05 17:48:40 crc kubenswrapper[4961]: I1205 17:48:40.221356 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zv9zr\" (UniqueName: \"kubernetes.io/projected/b749bb70-6d63-47f6-8a9d-4b016baa1e49-kube-api-access-zv9zr\") pod \"b749bb70-6d63-47f6-8a9d-4b016baa1e49\" (UID: \"b749bb70-6d63-47f6-8a9d-4b016baa1e49\") " Dec 05 17:48:40 crc kubenswrapper[4961]: I1205 17:48:40.221452 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b749bb70-6d63-47f6-8a9d-4b016baa1e49-catalog-content\") pod \"b749bb70-6d63-47f6-8a9d-4b016baa1e49\" (UID: \"b749bb70-6d63-47f6-8a9d-4b016baa1e49\") " Dec 05 17:48:40 crc kubenswrapper[4961]: I1205 17:48:40.223888 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b749bb70-6d63-47f6-8a9d-4b016baa1e49-utilities" (OuterVolumeSpecName: "utilities") pod "b749bb70-6d63-47f6-8a9d-4b016baa1e49" (UID: "b749bb70-6d63-47f6-8a9d-4b016baa1e49"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:48:40 crc kubenswrapper[4961]: I1205 17:48:40.232164 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b749bb70-6d63-47f6-8a9d-4b016baa1e49-kube-api-access-zv9zr" (OuterVolumeSpecName: "kube-api-access-zv9zr") pod "b749bb70-6d63-47f6-8a9d-4b016baa1e49" (UID: "b749bb70-6d63-47f6-8a9d-4b016baa1e49"). InnerVolumeSpecName "kube-api-access-zv9zr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:48:40 crc kubenswrapper[4961]: I1205 17:48:40.276583 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b749bb70-6d63-47f6-8a9d-4b016baa1e49-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b749bb70-6d63-47f6-8a9d-4b016baa1e49" (UID: "b749bb70-6d63-47f6-8a9d-4b016baa1e49"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:48:40 crc kubenswrapper[4961]: I1205 17:48:40.296424 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jjckm" event={"ID":"b749bb70-6d63-47f6-8a9d-4b016baa1e49","Type":"ContainerDied","Data":"684161f3117a3204663efb1d9c4a573f8571508ebfb6a553f313f1858b6e44e7"} Dec 05 17:48:40 crc kubenswrapper[4961]: I1205 17:48:40.296474 4961 scope.go:117] "RemoveContainer" containerID="2c80cb4c5d4eb8c5d7a405f7f8c20f0016f60adf3ed2b7f0517078f675606953" Dec 05 17:48:40 crc kubenswrapper[4961]: I1205 17:48:40.296645 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jjckm" Dec 05 17:48:40 crc kubenswrapper[4961]: I1205 17:48:40.321125 4961 scope.go:117] "RemoveContainer" containerID="00833040fa8d9ee65829a213b1f6de590539988780e863f6daa9581c1a0067e2" Dec 05 17:48:40 crc kubenswrapper[4961]: I1205 17:48:40.322785 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zv9zr\" (UniqueName: \"kubernetes.io/projected/b749bb70-6d63-47f6-8a9d-4b016baa1e49-kube-api-access-zv9zr\") on node \"crc\" DevicePath \"\"" Dec 05 17:48:40 crc kubenswrapper[4961]: I1205 17:48:40.322813 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b749bb70-6d63-47f6-8a9d-4b016baa1e49-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:48:40 crc kubenswrapper[4961]: I1205 17:48:40.322826 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b749bb70-6d63-47f6-8a9d-4b016baa1e49-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:48:40 crc kubenswrapper[4961]: I1205 17:48:40.337295 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jjckm"] Dec 05 17:48:40 crc kubenswrapper[4961]: I1205 17:48:40.343373 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jjckm"] Dec 05 17:48:40 crc kubenswrapper[4961]: I1205 17:48:40.373110 4961 scope.go:117] "RemoveContainer" containerID="0da5117f53960763bf37757f038e6c0270f954122b1933f9370a4f9401434eed" Dec 05 17:48:40 crc kubenswrapper[4961]: I1205 17:48:40.871966 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b749bb70-6d63-47f6-8a9d-4b016baa1e49" path="/var/lib/kubelet/pods/b749bb70-6d63-47f6-8a9d-4b016baa1e49/volumes" Dec 05 17:48:46 crc kubenswrapper[4961]: I1205 17:48:46.351434 4961 generic.go:334] "Generic (PLEG): container finished" podID="9c6dad26-ad88-41f9-b656-86d08062a614" containerID="dea340d361b659f95503cb176bc6a0c481f592db5db784ebeabf2bff5ad65674" exitCode=0 Dec 05 17:48:46 crc kubenswrapper[4961]: I1205 17:48:46.351498 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vctlq" event={"ID":"9c6dad26-ad88-41f9-b656-86d08062a614","Type":"ContainerDied","Data":"dea340d361b659f95503cb176bc6a0c481f592db5db784ebeabf2bff5ad65674"} Dec 05 17:48:47 crc kubenswrapper[4961]: I1205 17:48:47.359465 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vctlq" event={"ID":"9c6dad26-ad88-41f9-b656-86d08062a614","Type":"ContainerStarted","Data":"38356102192d9d46556ef3e56213d27bd6a0ce414926375119c60ef664d9e4f7"} Dec 05 17:48:47 crc kubenswrapper[4961]: I1205 17:48:47.940999 4961 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/community-operators-6q2jw"] Dec 05 17:48:47 crc kubenswrapper[4961]: E1205 17:48:47.941647 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b749bb70-6d63-47f6-8a9d-4b016baa1e49" containerName="registry-server" Dec 05 17:48:47 crc kubenswrapper[4961]: I1205 17:48:47.941670 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="b749bb70-6d63-47f6-8a9d-4b016baa1e49" containerName="registry-server" Dec 05 17:48:47 crc kubenswrapper[4961]: E1205 17:48:47.941718 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b749bb70-6d63-47f6-8a9d-4b016baa1e49" containerName="extract-utilities" Dec 05 17:48:47 crc kubenswrapper[4961]: I1205 17:48:47.941727 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="b749bb70-6d63-47f6-8a9d-4b016baa1e49" containerName="extract-utilities" Dec 05 17:48:47 crc kubenswrapper[4961]: E1205 17:48:47.941744 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b749bb70-6d63-47f6-8a9d-4b016baa1e49" containerName="extract-content" Dec 05 17:48:47 crc kubenswrapper[4961]: I1205 17:48:47.941754 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="b749bb70-6d63-47f6-8a9d-4b016baa1e49" containerName="extract-content" Dec 05 17:48:47 crc kubenswrapper[4961]: I1205 17:48:47.941976 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="b749bb70-6d63-47f6-8a9d-4b016baa1e49" containerName="registry-server" Dec 05 17:48:47 crc kubenswrapper[4961]: I1205 17:48:47.943456 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-6q2jw" Dec 05 17:48:47 crc kubenswrapper[4961]: I1205 17:48:47.964647 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6q2jw"] Dec 05 17:48:48 crc kubenswrapper[4961]: I1205 17:48:48.051502 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5jks5\" (UniqueName: \"kubernetes.io/projected/5326dd56-9862-4aa7-b139-58d70055b703-kube-api-access-5jks5\") pod \"community-operators-6q2jw\" (UID: \"5326dd56-9862-4aa7-b139-58d70055b703\") " pod="openshift-marketplace/community-operators-6q2jw" Dec 05 17:48:48 crc kubenswrapper[4961]: I1205 17:48:48.051856 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5326dd56-9862-4aa7-b139-58d70055b703-catalog-content\") pod \"community-operators-6q2jw\" (UID: \"5326dd56-9862-4aa7-b139-58d70055b703\") " pod="openshift-marketplace/community-operators-6q2jw" Dec 05 17:48:48 crc kubenswrapper[4961]: I1205 17:48:48.052095 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5326dd56-9862-4aa7-b139-58d70055b703-utilities\") pod \"community-operators-6q2jw\" (UID: \"5326dd56-9862-4aa7-b139-58d70055b703\") " pod="openshift-marketplace/community-operators-6q2jw" Dec 05 17:48:48 crc kubenswrapper[4961]: I1205 17:48:48.153177 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5326dd56-9862-4aa7-b139-58d70055b703-catalog-content\") pod \"community-operators-6q2jw\" (UID: \"5326dd56-9862-4aa7-b139-58d70055b703\") " pod="openshift-marketplace/community-operators-6q2jw" Dec 05 17:48:48 crc kubenswrapper[4961]: I1205 17:48:48.153508 4961 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5326dd56-9862-4aa7-b139-58d70055b703-utilities\") pod \"community-operators-6q2jw\" (UID: \"5326dd56-9862-4aa7-b139-58d70055b703\") " pod="openshift-marketplace/community-operators-6q2jw" Dec 05 17:48:48 crc kubenswrapper[4961]: I1205 17:48:48.153620 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5jks5\" (UniqueName: \"kubernetes.io/projected/5326dd56-9862-4aa7-b139-58d70055b703-kube-api-access-5jks5\") pod \"community-operators-6q2jw\" (UID: \"5326dd56-9862-4aa7-b139-58d70055b703\") " pod="openshift-marketplace/community-operators-6q2jw" Dec 05 17:48:48 crc kubenswrapper[4961]: I1205 17:48:48.153956 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5326dd56-9862-4aa7-b139-58d70055b703-catalog-content\") pod \"community-operators-6q2jw\" (UID: \"5326dd56-9862-4aa7-b139-58d70055b703\") " pod="openshift-marketplace/community-operators-6q2jw" Dec 05 17:48:48 crc kubenswrapper[4961]: I1205 17:48:48.154250 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5326dd56-9862-4aa7-b139-58d70055b703-utilities\") pod \"community-operators-6q2jw\" (UID: \"5326dd56-9862-4aa7-b139-58d70055b703\") " pod="openshift-marketplace/community-operators-6q2jw" Dec 05 17:48:48 crc kubenswrapper[4961]: I1205 17:48:48.176700 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5jks5\" (UniqueName: \"kubernetes.io/projected/5326dd56-9862-4aa7-b139-58d70055b703-kube-api-access-5jks5\") pod \"community-operators-6q2jw\" (UID: \"5326dd56-9862-4aa7-b139-58d70055b703\") " pod="openshift-marketplace/community-operators-6q2jw" Dec 05 17:48:48 crc kubenswrapper[4961]: I1205 17:48:48.261574 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6q2jw" Dec 05 17:48:48 crc kubenswrapper[4961]: I1205 17:48:48.684696 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-6q2jw"] Dec 05 17:48:49 crc kubenswrapper[4961]: I1205 17:48:49.379661 4961 generic.go:334] "Generic (PLEG): container finished" podID="5326dd56-9862-4aa7-b139-58d70055b703" containerID="c30c76abce6997829f28b2de31bf77bff2251ac788f322055a2b7520a5f19203" exitCode=0 Dec 05 17:48:49 crc kubenswrapper[4961]: I1205 17:48:49.380003 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6q2jw" event={"ID":"5326dd56-9862-4aa7-b139-58d70055b703","Type":"ContainerDied","Data":"c30c76abce6997829f28b2de31bf77bff2251ac788f322055a2b7520a5f19203"} Dec 05 17:48:49 crc kubenswrapper[4961]: I1205 17:48:49.380028 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6q2jw" event={"ID":"5326dd56-9862-4aa7-b139-58d70055b703","Type":"ContainerStarted","Data":"221fb172cc4fd80a31f95825d5e3d434d09315f201583bd2100d955cba699d93"} Dec 05 17:48:49 crc kubenswrapper[4961]: I1205 17:48:49.382764 4961 generic.go:334] "Generic (PLEG): container finished" podID="9c6dad26-ad88-41f9-b656-86d08062a614" containerID="38356102192d9d46556ef3e56213d27bd6a0ce414926375119c60ef664d9e4f7" exitCode=0 Dec 05 17:48:49 crc kubenswrapper[4961]: I1205 17:48:49.382820 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vctlq" event={"ID":"9c6dad26-ad88-41f9-b656-86d08062a614","Type":"ContainerDied","Data":"38356102192d9d46556ef3e56213d27bd6a0ce414926375119c60ef664d9e4f7"} Dec 05 17:48:50 crc kubenswrapper[4961]: I1205 17:48:50.396452 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6q2jw" event={"ID":"5326dd56-9862-4aa7-b139-58d70055b703","Type":"ContainerStarted","Data":"ee245eedb0881741ac14b006fd47029d0fb0ee8991ac4818888f3754f675b6b5"} Dec 05 17:48:50 crc kubenswrapper[4961]: I1205 17:48:50.398497 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vctlq" event={"ID":"9c6dad26-ad88-41f9-b656-86d08062a614","Type":"ContainerStarted","Data":"3960e22e0cc952cbcd19f1aa91cbbaec98b57f4dd7e6b26ffbce97063646c83a"} Dec 05 17:48:50 crc kubenswrapper[4961]: I1205 17:48:50.437753 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vctlq" podStartSLOduration=10.902751689 podStartE2EDuration="14.437729335s" podCreationTimestamp="2025-12-05 17:48:36 +0000 UTC" firstStartedPulling="2025-12-05 17:48:46.352972274 +0000 UTC m=+932.414122747" lastFinishedPulling="2025-12-05 17:48:49.88794991 +0000 UTC m=+935.949100393" observedRunningTime="2025-12-05 17:48:50.434726372 +0000 UTC m=+936.495876865" watchObservedRunningTime="2025-12-05 17:48:50.437729335 +0000 UTC m=+936.498879808" Dec 05 17:48:51 crc kubenswrapper[4961]: I1205 17:48:51.407130 4961 generic.go:334] "Generic (PLEG): container finished" podID="5326dd56-9862-4aa7-b139-58d70055b703" containerID="ee245eedb0881741ac14b006fd47029d0fb0ee8991ac4818888f3754f675b6b5" exitCode=0 Dec 05 17:48:51 crc kubenswrapper[4961]: I1205 17:48:51.407180 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6q2jw" 
event={"ID":"5326dd56-9862-4aa7-b139-58d70055b703","Type":"ContainerDied","Data":"ee245eedb0881741ac14b006fd47029d0fb0ee8991ac4818888f3754f675b6b5"} Dec 05 17:48:52 crc kubenswrapper[4961]: I1205 17:48:52.415710 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6q2jw" event={"ID":"5326dd56-9862-4aa7-b139-58d70055b703","Type":"ContainerStarted","Data":"a1e7db1654a8f5dd5fdc1e3948bde7bafee3ff4f7ea4696d933c8f4cc4d967f2"} Dec 05 17:48:52 crc kubenswrapper[4961]: I1205 17:48:52.512233 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-6q2jw" podStartSLOduration=2.993023284 podStartE2EDuration="5.512213752s" podCreationTimestamp="2025-12-05 17:48:47 +0000 UTC" firstStartedPulling="2025-12-05 17:48:49.382835459 +0000 UTC m=+935.443985942" lastFinishedPulling="2025-12-05 17:48:51.902025937 +0000 UTC m=+937.963176410" observedRunningTime="2025-12-05 17:48:52.51090484 +0000 UTC m=+938.572055333" watchObservedRunningTime="2025-12-05 17:48:52.512213752 +0000 UTC m=+938.573364225" Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.469947 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-sk59h"] Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.471357 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-sk59h" Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.475543 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns" Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.475797 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dnsmasq-dns-dockercfg-fqf2b" Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.475906 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"kube-root-ca.crt" Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.476032 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openshift-service-ca.crt" Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.479686 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-sk59h"] Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.560097 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h74jc\" (UniqueName: \"kubernetes.io/projected/682c1877-0a97-4c80-ae9b-de6bebbb1b3d-kube-api-access-h74jc\") pod \"dnsmasq-dns-675f4bcbfc-sk59h\" (UID: \"682c1877-0a97-4c80-ae9b-de6bebbb1b3d\") " pod="openstack/dnsmasq-dns-675f4bcbfc-sk59h" Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.560155 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/682c1877-0a97-4c80-ae9b-de6bebbb1b3d-config\") pod \"dnsmasq-dns-675f4bcbfc-sk59h\" (UID: \"682c1877-0a97-4c80-ae9b-de6bebbb1b3d\") " pod="openstack/dnsmasq-dns-675f4bcbfc-sk59h" Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.577869 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-ds5zx"] Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.583468 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-ds5zx" Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.585322 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-svc" Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.601086 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-ds5zx"] Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.660978 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9416be69-29a1-4785-8f71-78c22f6c33ff-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-ds5zx\" (UID: \"9416be69-29a1-4785-8f71-78c22f6c33ff\") " pod="openstack/dnsmasq-dns-78dd6ddcc-ds5zx" Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.661043 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h74jc\" (UniqueName: \"kubernetes.io/projected/682c1877-0a97-4c80-ae9b-de6bebbb1b3d-kube-api-access-h74jc\") pod \"dnsmasq-dns-675f4bcbfc-sk59h\" (UID: \"682c1877-0a97-4c80-ae9b-de6bebbb1b3d\") " pod="openstack/dnsmasq-dns-675f4bcbfc-sk59h" Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.661095 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/682c1877-0a97-4c80-ae9b-de6bebbb1b3d-config\") pod \"dnsmasq-dns-675f4bcbfc-sk59h\" (UID: \"682c1877-0a97-4c80-ae9b-de6bebbb1b3d\") " pod="openstack/dnsmasq-dns-675f4bcbfc-sk59h" Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.661178 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mmjm\" (UniqueName: \"kubernetes.io/projected/9416be69-29a1-4785-8f71-78c22f6c33ff-kube-api-access-8mmjm\") pod \"dnsmasq-dns-78dd6ddcc-ds5zx\" (UID: \"9416be69-29a1-4785-8f71-78c22f6c33ff\") " pod="openstack/dnsmasq-dns-78dd6ddcc-ds5zx" Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.661209 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9416be69-29a1-4785-8f71-78c22f6c33ff-config\") pod \"dnsmasq-dns-78dd6ddcc-ds5zx\" (UID: \"9416be69-29a1-4785-8f71-78c22f6c33ff\") " pod="openstack/dnsmasq-dns-78dd6ddcc-ds5zx" Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.662745 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/682c1877-0a97-4c80-ae9b-de6bebbb1b3d-config\") pod \"dnsmasq-dns-675f4bcbfc-sk59h\" (UID: \"682c1877-0a97-4c80-ae9b-de6bebbb1b3d\") " pod="openstack/dnsmasq-dns-675f4bcbfc-sk59h" Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.683306 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h74jc\" (UniqueName: \"kubernetes.io/projected/682c1877-0a97-4c80-ae9b-de6bebbb1b3d-kube-api-access-h74jc\") pod \"dnsmasq-dns-675f4bcbfc-sk59h\" (UID: \"682c1877-0a97-4c80-ae9b-de6bebbb1b3d\") " pod="openstack/dnsmasq-dns-675f4bcbfc-sk59h" Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.762298 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9416be69-29a1-4785-8f71-78c22f6c33ff-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-ds5zx\" (UID: \"9416be69-29a1-4785-8f71-78c22f6c33ff\") " pod="openstack/dnsmasq-dns-78dd6ddcc-ds5zx" Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 
17:48:55.762757 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mmjm\" (UniqueName: \"kubernetes.io/projected/9416be69-29a1-4785-8f71-78c22f6c33ff-kube-api-access-8mmjm\") pod \"dnsmasq-dns-78dd6ddcc-ds5zx\" (UID: \"9416be69-29a1-4785-8f71-78c22f6c33ff\") " pod="openstack/dnsmasq-dns-78dd6ddcc-ds5zx" Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.762818 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9416be69-29a1-4785-8f71-78c22f6c33ff-config\") pod \"dnsmasq-dns-78dd6ddcc-ds5zx\" (UID: \"9416be69-29a1-4785-8f71-78c22f6c33ff\") " pod="openstack/dnsmasq-dns-78dd6ddcc-ds5zx" Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.763745 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9416be69-29a1-4785-8f71-78c22f6c33ff-dns-svc\") pod \"dnsmasq-dns-78dd6ddcc-ds5zx\" (UID: \"9416be69-29a1-4785-8f71-78c22f6c33ff\") " pod="openstack/dnsmasq-dns-78dd6ddcc-ds5zx" Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.764371 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9416be69-29a1-4785-8f71-78c22f6c33ff-config\") pod \"dnsmasq-dns-78dd6ddcc-ds5zx\" (UID: \"9416be69-29a1-4785-8f71-78c22f6c33ff\") " pod="openstack/dnsmasq-dns-78dd6ddcc-ds5zx" Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.780669 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mmjm\" (UniqueName: \"kubernetes.io/projected/9416be69-29a1-4785-8f71-78c22f6c33ff-kube-api-access-8mmjm\") pod \"dnsmasq-dns-78dd6ddcc-ds5zx\" (UID: \"9416be69-29a1-4785-8f71-78c22f6c33ff\") " pod="openstack/dnsmasq-dns-78dd6ddcc-ds5zx" Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.808732 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-sk59h" Dec 05 17:48:55 crc kubenswrapper[4961]: I1205 17:48:55.911128 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-ds5zx" Dec 05 17:48:56 crc kubenswrapper[4961]: I1205 17:48:56.449369 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-sk59h"] Dec 05 17:48:56 crc kubenswrapper[4961]: W1205 17:48:56.458645 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod682c1877_0a97_4c80_ae9b_de6bebbb1b3d.slice/crio-9967e21f9a22f404dc56fdfdc51a86c5a26a770e52df2b5230c3a2c8111d3752 WatchSource:0}: Error finding container 9967e21f9a22f404dc56fdfdc51a86c5a26a770e52df2b5230c3a2c8111d3752: Status 404 returned error can't find the container with id 9967e21f9a22f404dc56fdfdc51a86c5a26a770e52df2b5230c3a2c8111d3752 Dec 05 17:48:56 crc kubenswrapper[4961]: I1205 17:48:56.756787 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-ds5zx"] Dec 05 17:48:57 crc kubenswrapper[4961]: I1205 17:48:57.129760 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vctlq" Dec 05 17:48:57 crc kubenswrapper[4961]: I1205 17:48:57.130530 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vctlq" Dec 05 17:48:57 crc kubenswrapper[4961]: I1205 17:48:57.186742 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vctlq" Dec 05 17:48:57 crc kubenswrapper[4961]: I1205 17:48:57.449927 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-ds5zx" event={"ID":"9416be69-29a1-4785-8f71-78c22f6c33ff","Type":"ContainerStarted","Data":"0fc4bf7259022b5148e233d27109b7081fd95659f7db57a2c82036bd6e80b688"} Dec 05 17:48:57 crc kubenswrapper[4961]: I1205 17:48:57.451196 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-sk59h" event={"ID":"682c1877-0a97-4c80-ae9b-de6bebbb1b3d","Type":"ContainerStarted","Data":"9967e21f9a22f404dc56fdfdc51a86c5a26a770e52df2b5230c3a2c8111d3752"} Dec 05 17:48:57 crc kubenswrapper[4961]: I1205 17:48:57.490714 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vctlq" Dec 05 17:48:57 crc kubenswrapper[4961]: I1205 17:48:57.531398 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vctlq"] Dec 05 17:48:58 crc kubenswrapper[4961]: I1205 17:48:58.261998 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-6q2jw" Dec 05 17:48:58 crc kubenswrapper[4961]: I1205 17:48:58.262289 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-6q2jw" Dec 05 17:48:58 crc kubenswrapper[4961]: I1205 17:48:58.334915 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-6q2jw" Dec 05 17:48:58 crc kubenswrapper[4961]: I1205 17:48:58.512684 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-6q2jw" Dec 05 17:48:58 crc kubenswrapper[4961]: I1205 17:48:58.596286 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-sk59h"] Dec 05 17:48:58 crc kubenswrapper[4961]: I1205 17:48:58.620190 4961 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openstack/dnsmasq-dns-666b6646f7-t5s6v"] Dec 05 17:48:58 crc kubenswrapper[4961]: I1205 17:48:58.621391 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-t5s6v" Dec 05 17:48:58 crc kubenswrapper[4961]: I1205 17:48:58.634581 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-t5s6v"] Dec 05 17:48:58 crc kubenswrapper[4961]: I1205 17:48:58.704689 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6daed413-edbd-4f1e-8a9e-2d51d9223af2-dns-svc\") pod \"dnsmasq-dns-666b6646f7-t5s6v\" (UID: \"6daed413-edbd-4f1e-8a9e-2d51d9223af2\") " pod="openstack/dnsmasq-dns-666b6646f7-t5s6v" Dec 05 17:48:58 crc kubenswrapper[4961]: I1205 17:48:58.704789 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rbm49\" (UniqueName: \"kubernetes.io/projected/6daed413-edbd-4f1e-8a9e-2d51d9223af2-kube-api-access-rbm49\") pod \"dnsmasq-dns-666b6646f7-t5s6v\" (UID: \"6daed413-edbd-4f1e-8a9e-2d51d9223af2\") " pod="openstack/dnsmasq-dns-666b6646f7-t5s6v" Dec 05 17:48:58 crc kubenswrapper[4961]: I1205 17:48:58.704811 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6daed413-edbd-4f1e-8a9e-2d51d9223af2-config\") pod \"dnsmasq-dns-666b6646f7-t5s6v\" (UID: \"6daed413-edbd-4f1e-8a9e-2d51d9223af2\") " pod="openstack/dnsmasq-dns-666b6646f7-t5s6v" Dec 05 17:48:58 crc kubenswrapper[4961]: I1205 17:48:58.805909 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6daed413-edbd-4f1e-8a9e-2d51d9223af2-dns-svc\") pod \"dnsmasq-dns-666b6646f7-t5s6v\" (UID: \"6daed413-edbd-4f1e-8a9e-2d51d9223af2\") " pod="openstack/dnsmasq-dns-666b6646f7-t5s6v" Dec 05 17:48:58 crc kubenswrapper[4961]: I1205 17:48:58.806288 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rbm49\" (UniqueName: \"kubernetes.io/projected/6daed413-edbd-4f1e-8a9e-2d51d9223af2-kube-api-access-rbm49\") pod \"dnsmasq-dns-666b6646f7-t5s6v\" (UID: \"6daed413-edbd-4f1e-8a9e-2d51d9223af2\") " pod="openstack/dnsmasq-dns-666b6646f7-t5s6v" Dec 05 17:48:58 crc kubenswrapper[4961]: I1205 17:48:58.806321 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6daed413-edbd-4f1e-8a9e-2d51d9223af2-config\") pod \"dnsmasq-dns-666b6646f7-t5s6v\" (UID: \"6daed413-edbd-4f1e-8a9e-2d51d9223af2\") " pod="openstack/dnsmasq-dns-666b6646f7-t5s6v" Dec 05 17:48:58 crc kubenswrapper[4961]: I1205 17:48:58.806926 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6daed413-edbd-4f1e-8a9e-2d51d9223af2-dns-svc\") pod \"dnsmasq-dns-666b6646f7-t5s6v\" (UID: \"6daed413-edbd-4f1e-8a9e-2d51d9223af2\") " pod="openstack/dnsmasq-dns-666b6646f7-t5s6v" Dec 05 17:48:58 crc kubenswrapper[4961]: I1205 17:48:58.807197 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6daed413-edbd-4f1e-8a9e-2d51d9223af2-config\") pod \"dnsmasq-dns-666b6646f7-t5s6v\" (UID: \"6daed413-edbd-4f1e-8a9e-2d51d9223af2\") " pod="openstack/dnsmasq-dns-666b6646f7-t5s6v" Dec 05 17:48:58 crc kubenswrapper[4961]: I1205 17:48:58.834628 4961 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rbm49\" (UniqueName: \"kubernetes.io/projected/6daed413-edbd-4f1e-8a9e-2d51d9223af2-kube-api-access-rbm49\") pod \"dnsmasq-dns-666b6646f7-t5s6v\" (UID: \"6daed413-edbd-4f1e-8a9e-2d51d9223af2\") " pod="openstack/dnsmasq-dns-666b6646f7-t5s6v" Dec 05 17:48:58 crc kubenswrapper[4961]: I1205 17:48:58.859033 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-ds5zx"] Dec 05 17:48:58 crc kubenswrapper[4961]: I1205 17:48:58.899289 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-qnvkq"] Dec 05 17:48:58 crc kubenswrapper[4961]: I1205 17:48:58.901334 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-qnvkq" Dec 05 17:48:58 crc kubenswrapper[4961]: I1205 17:48:58.912744 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-qnvkq"] Dec 05 17:48:58 crc kubenswrapper[4961]: I1205 17:48:58.937331 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-t5s6v" Dec 05 17:48:59 crc kubenswrapper[4961]: I1205 17:48:59.008271 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c79b87d-94a7-47d6-aaa5-3d420ce891d0-config\") pod \"dnsmasq-dns-57d769cc4f-qnvkq\" (UID: \"7c79b87d-94a7-47d6-aaa5-3d420ce891d0\") " pod="openstack/dnsmasq-dns-57d769cc4f-qnvkq" Dec 05 17:48:59 crc kubenswrapper[4961]: I1205 17:48:59.008645 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7c79b87d-94a7-47d6-aaa5-3d420ce891d0-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-qnvkq\" (UID: \"7c79b87d-94a7-47d6-aaa5-3d420ce891d0\") " pod="openstack/dnsmasq-dns-57d769cc4f-qnvkq" Dec 05 17:48:59 crc kubenswrapper[4961]: I1205 17:48:59.008828 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77ggr\" (UniqueName: \"kubernetes.io/projected/7c79b87d-94a7-47d6-aaa5-3d420ce891d0-kube-api-access-77ggr\") pod \"dnsmasq-dns-57d769cc4f-qnvkq\" (UID: \"7c79b87d-94a7-47d6-aaa5-3d420ce891d0\") " pod="openstack/dnsmasq-dns-57d769cc4f-qnvkq" Dec 05 17:48:59 crc kubenswrapper[4961]: I1205 17:48:59.109710 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c79b87d-94a7-47d6-aaa5-3d420ce891d0-config\") pod \"dnsmasq-dns-57d769cc4f-qnvkq\" (UID: \"7c79b87d-94a7-47d6-aaa5-3d420ce891d0\") " pod="openstack/dnsmasq-dns-57d769cc4f-qnvkq" Dec 05 17:48:59 crc kubenswrapper[4961]: I1205 17:48:59.109767 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7c79b87d-94a7-47d6-aaa5-3d420ce891d0-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-qnvkq\" (UID: \"7c79b87d-94a7-47d6-aaa5-3d420ce891d0\") " pod="openstack/dnsmasq-dns-57d769cc4f-qnvkq" Dec 05 17:48:59 crc kubenswrapper[4961]: I1205 17:48:59.109807 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77ggr\" (UniqueName: \"kubernetes.io/projected/7c79b87d-94a7-47d6-aaa5-3d420ce891d0-kube-api-access-77ggr\") pod \"dnsmasq-dns-57d769cc4f-qnvkq\" (UID: \"7c79b87d-94a7-47d6-aaa5-3d420ce891d0\") " pod="openstack/dnsmasq-dns-57d769cc4f-qnvkq" Dec 05 
17:48:59 crc kubenswrapper[4961]: I1205 17:48:59.110913 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c79b87d-94a7-47d6-aaa5-3d420ce891d0-config\") pod \"dnsmasq-dns-57d769cc4f-qnvkq\" (UID: \"7c79b87d-94a7-47d6-aaa5-3d420ce891d0\") " pod="openstack/dnsmasq-dns-57d769cc4f-qnvkq" Dec 05 17:48:59 crc kubenswrapper[4961]: I1205 17:48:59.110968 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7c79b87d-94a7-47d6-aaa5-3d420ce891d0-dns-svc\") pod \"dnsmasq-dns-57d769cc4f-qnvkq\" (UID: \"7c79b87d-94a7-47d6-aaa5-3d420ce891d0\") " pod="openstack/dnsmasq-dns-57d769cc4f-qnvkq" Dec 05 17:48:59 crc kubenswrapper[4961]: I1205 17:48:59.141995 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77ggr\" (UniqueName: \"kubernetes.io/projected/7c79b87d-94a7-47d6-aaa5-3d420ce891d0-kube-api-access-77ggr\") pod \"dnsmasq-dns-57d769cc4f-qnvkq\" (UID: \"7c79b87d-94a7-47d6-aaa5-3d420ce891d0\") " pod="openstack/dnsmasq-dns-57d769cc4f-qnvkq" Dec 05 17:48:59 crc kubenswrapper[4961]: I1205 17:48:59.216708 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-qnvkq" Dec 05 17:48:59 crc kubenswrapper[4961]: I1205 17:48:59.416515 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-t5s6v"] Dec 05 17:48:59 crc kubenswrapper[4961]: I1205 17:48:59.527261 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vctlq" podUID="9c6dad26-ad88-41f9-b656-86d08062a614" containerName="registry-server" containerID="cri-o://3960e22e0cc952cbcd19f1aa91cbbaec98b57f4dd7e6b26ffbce97063646c83a" gracePeriod=2 Dec 05 17:48:59 crc kubenswrapper[4961]: I1205 17:48:59.825218 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6q2jw"] Dec 05 17:48:59 crc kubenswrapper[4961]: I1205 17:48:59.907622 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-qnvkq"] Dec 05 17:48:59 crc kubenswrapper[4961]: W1205 17:48:59.911515 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod7c79b87d_94a7_47d6_aaa5_3d420ce891d0.slice/crio-940c4c5cf92a3946f00019d74891e324d0bbacfd54f763576227c7b185510743 WatchSource:0}: Error finding container 940c4c5cf92a3946f00019d74891e324d0bbacfd54f763576227c7b185510743: Status 404 returned error can't find the container with id 940c4c5cf92a3946f00019d74891e324d0bbacfd54f763576227c7b185510743 Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.560572 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-t5s6v" event={"ID":"6daed413-edbd-4f1e-8a9e-2d51d9223af2","Type":"ContainerStarted","Data":"e3c6c3cb326609e61117a4b79a1dd7b6b702f8b4c2d5dfe1a9e4bb08d17af3e6"} Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.565130 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-qnvkq" event={"ID":"7c79b87d-94a7-47d6-aaa5-3d420ce891d0","Type":"ContainerStarted","Data":"940c4c5cf92a3946f00019d74891e324d0bbacfd54f763576227c7b185510743"} Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.854630 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 
17:49:00.856397 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.860876 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.860895 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.861104 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.861327 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.861432 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.861884 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.862012 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-fh7kv" Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.878070 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.881008 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.884077 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user" Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.884269 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie" Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.885539 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-rjwv6" Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.885804 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf" Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.885652 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc" Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.886018 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf" Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.886060 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data" Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.909207 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.916990 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.951355 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qbtnq\" (UniqueName: \"kubernetes.io/projected/346da897-3e71-4d6f-b17d-fe5f905dd705-kube-api-access-qbtnq\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " 
pod="openstack/rabbitmq-server-0" Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.951690 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/346da897-3e71-4d6f-b17d-fe5f905dd705-config-data\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.951763 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/346da897-3e71-4d6f-b17d-fe5f905dd705-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.951815 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/346da897-3e71-4d6f-b17d-fe5f905dd705-pod-info\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.951953 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/346da897-3e71-4d6f-b17d-fe5f905dd705-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.952021 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/346da897-3e71-4d6f-b17d-fe5f905dd705-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.952041 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/346da897-3e71-4d6f-b17d-fe5f905dd705-server-conf\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.952062 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.952084 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/346da897-3e71-4d6f-b17d-fe5f905dd705-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.952162 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/346da897-3e71-4d6f-b17d-fe5f905dd705-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:00 crc kubenswrapper[4961]: I1205 17:49:00.952193 4961 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/346da897-3e71-4d6f-b17d-fe5f905dd705-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.053608 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.053681 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qbtnq\" (UniqueName: \"kubernetes.io/projected/346da897-3e71-4d6f-b17d-fe5f905dd705-kube-api-access-qbtnq\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.053740 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.053761 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/346da897-3e71-4d6f-b17d-fe5f905dd705-config-data\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.053810 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.053841 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/346da897-3e71-4d6f-b17d-fe5f905dd705-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.053888 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/346da897-3e71-4d6f-b17d-fe5f905dd705-pod-info\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.053918 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.053967 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"pod-info\" (UniqueName: \"kubernetes.io/downward-api/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.053991 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/346da897-3e71-4d6f-b17d-fe5f905dd705-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.054012 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/346da897-3e71-4d6f-b17d-fe5f905dd705-server-conf\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.054047 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/346da897-3e71-4d6f-b17d-fe5f905dd705-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.054070 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.054111 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/346da897-3e71-4d6f-b17d-fe5f905dd705-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.054154 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.054192 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.054225 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.054247 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/346da897-3e71-4d6f-b17d-fe5f905dd705-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " 
pod="openstack/rabbitmq-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.054287 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.054308 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/346da897-3e71-4d6f-b17d-fe5f905dd705-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.054324 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.054368 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5wnfm\" (UniqueName: \"kubernetes.io/projected/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-kube-api-access-5wnfm\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.055244 4961 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.056101 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/346da897-3e71-4d6f-b17d-fe5f905dd705-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.058512 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/346da897-3e71-4d6f-b17d-fe5f905dd705-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.060985 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/346da897-3e71-4d6f-b17d-fe5f905dd705-config-data\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.061260 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/346da897-3e71-4d6f-b17d-fe5f905dd705-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.061850 4961 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/346da897-3e71-4d6f-b17d-fe5f905dd705-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.062229 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/346da897-3e71-4d6f-b17d-fe5f905dd705-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.062239 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/346da897-3e71-4d6f-b17d-fe5f905dd705-server-conf\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.064759 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/346da897-3e71-4d6f-b17d-fe5f905dd705-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.078590 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/346da897-3e71-4d6f-b17d-fe5f905dd705-pod-info\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.079727 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qbtnq\" (UniqueName: \"kubernetes.io/projected/346da897-3e71-4d6f-b17d-fe5f905dd705-kube-api-access-qbtnq\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.082316 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") " pod="openstack/rabbitmq-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.155441 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5wnfm\" (UniqueName: \"kubernetes.io/projected/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-kube-api-access-5wnfm\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.155513 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.156077 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " 
pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.156118 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.156532 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.156625 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.158154 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.158489 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.158526 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.158666 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.158618 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.158812 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.158840 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" 
(UniqueName: \"kubernetes.io/empty-dir/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.158999 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.159170 4961 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.159998 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.160387 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.160614 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.164299 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.174744 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5wnfm\" (UniqueName: \"kubernetes.io/projected/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-kube-api-access-5wnfm\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.188177 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.234349 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.264957 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.282410 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.447531 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-galera-0"] Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.449097 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.456406 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-svc" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.458985 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config-data" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.459260 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-scripts" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.459588 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-dockercfg-cv6qg" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.468505 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"combined-ca-bundle" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.478061 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.519453 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vctlq" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.543334 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.574951 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"78297e26-2e01-4bb1-8f35-c96861dfda09\") " pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.574992 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/78297e26-2e01-4bb1-8f35-c96861dfda09-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"78297e26-2e01-4bb1-8f35-c96861dfda09\") " pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.575030 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/78297e26-2e01-4bb1-8f35-c96861dfda09-config-data-generated\") pod \"openstack-galera-0\" (UID: \"78297e26-2e01-4bb1-8f35-c96861dfda09\") " pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.575071 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/78297e26-2e01-4bb1-8f35-c96861dfda09-config-data-default\") pod \"openstack-galera-0\" (UID: \"78297e26-2e01-4bb1-8f35-c96861dfda09\") " pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.575092 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78297e26-2e01-4bb1-8f35-c96861dfda09-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"78297e26-2e01-4bb1-8f35-c96861dfda09\") " pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.575163 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78297e26-2e01-4bb1-8f35-c96861dfda09-operator-scripts\") pod \"openstack-galera-0\" (UID: \"78297e26-2e01-4bb1-8f35-c96861dfda09\") " pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.575201 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/78297e26-2e01-4bb1-8f35-c96861dfda09-kolla-config\") pod \"openstack-galera-0\" (UID: \"78297e26-2e01-4bb1-8f35-c96861dfda09\") " pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.575290 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x9f5l\" (UniqueName: \"kubernetes.io/projected/78297e26-2e01-4bb1-8f35-c96861dfda09-kube-api-access-x9f5l\") pod \"openstack-galera-0\" (UID: \"78297e26-2e01-4bb1-8f35-c96861dfda09\") " pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.588631 4961 generic.go:334] "Generic (PLEG): container finished" podID="9c6dad26-ad88-41f9-b656-86d08062a614" containerID="3960e22e0cc952cbcd19f1aa91cbbaec98b57f4dd7e6b26ffbce97063646c83a" exitCode=0 Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.588944 4961 kuberuntime_container.go:808] "Killing 
container with a grace period" pod="openshift-marketplace/community-operators-6q2jw" podUID="5326dd56-9862-4aa7-b139-58d70055b703" containerName="registry-server" containerID="cri-o://a1e7db1654a8f5dd5fdc1e3948bde7bafee3ff4f7ea4696d933c8f4cc4d967f2" gracePeriod=2 Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.589169 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vctlq" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.589154 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vctlq" event={"ID":"9c6dad26-ad88-41f9-b656-86d08062a614","Type":"ContainerDied","Data":"3960e22e0cc952cbcd19f1aa91cbbaec98b57f4dd7e6b26ffbce97063646c83a"} Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.589216 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vctlq" event={"ID":"9c6dad26-ad88-41f9-b656-86d08062a614","Type":"ContainerDied","Data":"43f325fca948fe99dbbc802b4194bfa4a1f27e72a84b1ef224e5a974e523b9a9"} Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.589242 4961 scope.go:117] "RemoveContainer" containerID="3960e22e0cc952cbcd19f1aa91cbbaec98b57f4dd7e6b26ffbce97063646c83a" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.657942 4961 scope.go:117] "RemoveContainer" containerID="38356102192d9d46556ef3e56213d27bd6a0ce414926375119c60ef664d9e4f7" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.676727 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xz2rl\" (UniqueName: \"kubernetes.io/projected/9c6dad26-ad88-41f9-b656-86d08062a614-kube-api-access-xz2rl\") pod \"9c6dad26-ad88-41f9-b656-86d08062a614\" (UID: \"9c6dad26-ad88-41f9-b656-86d08062a614\") " Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.676786 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c6dad26-ad88-41f9-b656-86d08062a614-catalog-content\") pod \"9c6dad26-ad88-41f9-b656-86d08062a614\" (UID: \"9c6dad26-ad88-41f9-b656-86d08062a614\") " Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.676847 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c6dad26-ad88-41f9-b656-86d08062a614-utilities\") pod \"9c6dad26-ad88-41f9-b656-86d08062a614\" (UID: \"9c6dad26-ad88-41f9-b656-86d08062a614\") " Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.677064 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/78297e26-2e01-4bb1-8f35-c96861dfda09-config-data-default\") pod \"openstack-galera-0\" (UID: \"78297e26-2e01-4bb1-8f35-c96861dfda09\") " pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.677087 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78297e26-2e01-4bb1-8f35-c96861dfda09-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"78297e26-2e01-4bb1-8f35-c96861dfda09\") " pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.677127 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78297e26-2e01-4bb1-8f35-c96861dfda09-operator-scripts\") pod 
\"openstack-galera-0\" (UID: \"78297e26-2e01-4bb1-8f35-c96861dfda09\") " pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.677156 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/78297e26-2e01-4bb1-8f35-c96861dfda09-kolla-config\") pod \"openstack-galera-0\" (UID: \"78297e26-2e01-4bb1-8f35-c96861dfda09\") " pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.677182 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x9f5l\" (UniqueName: \"kubernetes.io/projected/78297e26-2e01-4bb1-8f35-c96861dfda09-kube-api-access-x9f5l\") pod \"openstack-galera-0\" (UID: \"78297e26-2e01-4bb1-8f35-c96861dfda09\") " pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.677223 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"78297e26-2e01-4bb1-8f35-c96861dfda09\") " pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.677240 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/78297e26-2e01-4bb1-8f35-c96861dfda09-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"78297e26-2e01-4bb1-8f35-c96861dfda09\") " pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.677264 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/78297e26-2e01-4bb1-8f35-c96861dfda09-config-data-generated\") pod \"openstack-galera-0\" (UID: \"78297e26-2e01-4bb1-8f35-c96861dfda09\") " pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.677649 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/78297e26-2e01-4bb1-8f35-c96861dfda09-config-data-generated\") pod \"openstack-galera-0\" (UID: \"78297e26-2e01-4bb1-8f35-c96861dfda09\") " pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.678102 4961 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"78297e26-2e01-4bb1-8f35-c96861dfda09\") device mount path \"/mnt/openstack/pv02\"" pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.678999 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/78297e26-2e01-4bb1-8f35-c96861dfda09-kolla-config\") pod \"openstack-galera-0\" (UID: \"78297e26-2e01-4bb1-8f35-c96861dfda09\") " pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.679476 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/78297e26-2e01-4bb1-8f35-c96861dfda09-config-data-default\") pod \"openstack-galera-0\" (UID: \"78297e26-2e01-4bb1-8f35-c96861dfda09\") " pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.680295 4961 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c6dad26-ad88-41f9-b656-86d08062a614-utilities" (OuterVolumeSpecName: "utilities") pod "9c6dad26-ad88-41f9-b656-86d08062a614" (UID: "9c6dad26-ad88-41f9-b656-86d08062a614"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.681835 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78297e26-2e01-4bb1-8f35-c96861dfda09-operator-scripts\") pod \"openstack-galera-0\" (UID: \"78297e26-2e01-4bb1-8f35-c96861dfda09\") " pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.683142 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9c6dad26-ad88-41f9-b656-86d08062a614-kube-api-access-xz2rl" (OuterVolumeSpecName: "kube-api-access-xz2rl") pod "9c6dad26-ad88-41f9-b656-86d08062a614" (UID: "9c6dad26-ad88-41f9-b656-86d08062a614"). InnerVolumeSpecName "kube-api-access-xz2rl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.691150 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/78297e26-2e01-4bb1-8f35-c96861dfda09-galera-tls-certs\") pod \"openstack-galera-0\" (UID: \"78297e26-2e01-4bb1-8f35-c96861dfda09\") " pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.692247 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78297e26-2e01-4bb1-8f35-c96861dfda09-combined-ca-bundle\") pod \"openstack-galera-0\" (UID: \"78297e26-2e01-4bb1-8f35-c96861dfda09\") " pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.695546 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x9f5l\" (UniqueName: \"kubernetes.io/projected/78297e26-2e01-4bb1-8f35-c96861dfda09-kube-api-access-x9f5l\") pod \"openstack-galera-0\" (UID: \"78297e26-2e01-4bb1-8f35-c96861dfda09\") " pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.717655 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-0\" (UID: \"78297e26-2e01-4bb1-8f35-c96861dfda09\") " pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.776525 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-galera-0" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.778660 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9c6dad26-ad88-41f9-b656-86d08062a614-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.778693 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xz2rl\" (UniqueName: \"kubernetes.io/projected/9c6dad26-ad88-41f9-b656-86d08062a614-kube-api-access-xz2rl\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.819134 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.876516 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9c6dad26-ad88-41f9-b656-86d08062a614-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9c6dad26-ad88-41f9-b656-86d08062a614" (UID: "9c6dad26-ad88-41f9-b656-86d08062a614"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.879484 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9c6dad26-ad88-41f9-b656-86d08062a614-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.885004 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.894051 4961 scope.go:117] "RemoveContainer" containerID="dea340d361b659f95503cb176bc6a0c481f592db5db784ebeabf2bff5ad65674" Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.960711 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vctlq"] Dec 05 17:49:01 crc kubenswrapper[4961]: I1205 17:49:01.984442 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vctlq"] Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.049920 4961 scope.go:117] "RemoveContainer" containerID="3960e22e0cc952cbcd19f1aa91cbbaec98b57f4dd7e6b26ffbce97063646c83a" Dec 05 17:49:02 crc kubenswrapper[4961]: E1205 17:49:02.050427 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3960e22e0cc952cbcd19f1aa91cbbaec98b57f4dd7e6b26ffbce97063646c83a\": container with ID starting with 3960e22e0cc952cbcd19f1aa91cbbaec98b57f4dd7e6b26ffbce97063646c83a not found: ID does not exist" containerID="3960e22e0cc952cbcd19f1aa91cbbaec98b57f4dd7e6b26ffbce97063646c83a" Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.050454 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3960e22e0cc952cbcd19f1aa91cbbaec98b57f4dd7e6b26ffbce97063646c83a"} err="failed to get container status \"3960e22e0cc952cbcd19f1aa91cbbaec98b57f4dd7e6b26ffbce97063646c83a\": rpc error: code = NotFound desc = could not find container \"3960e22e0cc952cbcd19f1aa91cbbaec98b57f4dd7e6b26ffbce97063646c83a\": container with ID starting with 3960e22e0cc952cbcd19f1aa91cbbaec98b57f4dd7e6b26ffbce97063646c83a not found: ID does not exist" Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.050474 4961 scope.go:117] "RemoveContainer" 
containerID="38356102192d9d46556ef3e56213d27bd6a0ce414926375119c60ef664d9e4f7" Dec 05 17:49:02 crc kubenswrapper[4961]: E1205 17:49:02.053695 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"38356102192d9d46556ef3e56213d27bd6a0ce414926375119c60ef664d9e4f7\": container with ID starting with 38356102192d9d46556ef3e56213d27bd6a0ce414926375119c60ef664d9e4f7 not found: ID does not exist" containerID="38356102192d9d46556ef3e56213d27bd6a0ce414926375119c60ef664d9e4f7" Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.053761 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"38356102192d9d46556ef3e56213d27bd6a0ce414926375119c60ef664d9e4f7"} err="failed to get container status \"38356102192d9d46556ef3e56213d27bd6a0ce414926375119c60ef664d9e4f7\": rpc error: code = NotFound desc = could not find container \"38356102192d9d46556ef3e56213d27bd6a0ce414926375119c60ef664d9e4f7\": container with ID starting with 38356102192d9d46556ef3e56213d27bd6a0ce414926375119c60ef664d9e4f7 not found: ID does not exist" Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.053926 4961 scope.go:117] "RemoveContainer" containerID="dea340d361b659f95503cb176bc6a0c481f592db5db784ebeabf2bff5ad65674" Dec 05 17:49:02 crc kubenswrapper[4961]: E1205 17:49:02.056168 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dea340d361b659f95503cb176bc6a0c481f592db5db784ebeabf2bff5ad65674\": container with ID starting with dea340d361b659f95503cb176bc6a0c481f592db5db784ebeabf2bff5ad65674 not found: ID does not exist" containerID="dea340d361b659f95503cb176bc6a0c481f592db5db784ebeabf2bff5ad65674" Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.056193 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dea340d361b659f95503cb176bc6a0c481f592db5db784ebeabf2bff5ad65674"} err="failed to get container status \"dea340d361b659f95503cb176bc6a0c481f592db5db784ebeabf2bff5ad65674\": rpc error: code = NotFound desc = could not find container \"dea340d361b659f95503cb176bc6a0c481f592db5db784ebeabf2bff5ad65674\": container with ID starting with dea340d361b659f95503cb176bc6a0c481f592db5db784ebeabf2bff5ad65674 not found: ID does not exist" Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.363919 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6q2jw" Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.496841 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5326dd56-9862-4aa7-b139-58d70055b703-catalog-content\") pod \"5326dd56-9862-4aa7-b139-58d70055b703\" (UID: \"5326dd56-9862-4aa7-b139-58d70055b703\") " Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.496992 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5326dd56-9862-4aa7-b139-58d70055b703-utilities\") pod \"5326dd56-9862-4aa7-b139-58d70055b703\" (UID: \"5326dd56-9862-4aa7-b139-58d70055b703\") " Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.497020 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5jks5\" (UniqueName: \"kubernetes.io/projected/5326dd56-9862-4aa7-b139-58d70055b703-kube-api-access-5jks5\") pod \"5326dd56-9862-4aa7-b139-58d70055b703\" (UID: \"5326dd56-9862-4aa7-b139-58d70055b703\") " Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.503094 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5326dd56-9862-4aa7-b139-58d70055b703-utilities" (OuterVolumeSpecName: "utilities") pod "5326dd56-9862-4aa7-b139-58d70055b703" (UID: "5326dd56-9862-4aa7-b139-58d70055b703"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.608992 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5326dd56-9862-4aa7-b139-58d70055b703-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.615300 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5326dd56-9862-4aa7-b139-58d70055b703-kube-api-access-5jks5" (OuterVolumeSpecName: "kube-api-access-5jks5") pod "5326dd56-9862-4aa7-b139-58d70055b703" (UID: "5326dd56-9862-4aa7-b139-58d70055b703"). InnerVolumeSpecName "kube-api-access-5jks5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.667129 4961 generic.go:334] "Generic (PLEG): container finished" podID="5326dd56-9862-4aa7-b139-58d70055b703" containerID="a1e7db1654a8f5dd5fdc1e3948bde7bafee3ff4f7ea4696d933c8f4cc4d967f2" exitCode=0 Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.667243 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6q2jw" event={"ID":"5326dd56-9862-4aa7-b139-58d70055b703","Type":"ContainerDied","Data":"a1e7db1654a8f5dd5fdc1e3948bde7bafee3ff4f7ea4696d933c8f4cc4d967f2"} Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.667274 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-6q2jw" event={"ID":"5326dd56-9862-4aa7-b139-58d70055b703","Type":"ContainerDied","Data":"221fb172cc4fd80a31f95825d5e3d434d09315f201583bd2100d955cba699d93"} Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.667294 4961 scope.go:117] "RemoveContainer" containerID="a1e7db1654a8f5dd5fdc1e3948bde7bafee3ff4f7ea4696d933c8f4cc4d967f2" Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.667424 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-6q2jw" Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.667624 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-galera-0"] Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.679508 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"494ff2b1-6bb3-4c8a-be81-02fe6f884caa","Type":"ContainerStarted","Data":"9e3d6fa198439a0af090b9ae9cf7ef5748981436ec9f32f35251f0b456a126c6"} Dec 05 17:49:02 crc kubenswrapper[4961]: W1205 17:49:02.688684 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod78297e26_2e01_4bb1_8f35_c96861dfda09.slice/crio-6c0b3d182d65b290244888b18b8dc235e429ba2c3bbf5f8fa79e36f4d81d6ac4 WatchSource:0}: Error finding container 6c0b3d182d65b290244888b18b8dc235e429ba2c3bbf5f8fa79e36f4d81d6ac4: Status 404 returned error can't find the container with id 6c0b3d182d65b290244888b18b8dc235e429ba2c3bbf5f8fa79e36f4d81d6ac4 Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.698709 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"346da897-3e71-4d6f-b17d-fe5f905dd705","Type":"ContainerStarted","Data":"9aef265e29d7b76dff61bc89ef13712094236b400da739a012b8e1f204bfca11"} Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.709546 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5jks5\" (UniqueName: \"kubernetes.io/projected/5326dd56-9862-4aa7-b139-58d70055b703-kube-api-access-5jks5\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.709657 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5326dd56-9862-4aa7-b139-58d70055b703-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5326dd56-9862-4aa7-b139-58d70055b703" (UID: "5326dd56-9862-4aa7-b139-58d70055b703"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.717400 4961 scope.go:117] "RemoveContainer" containerID="ee245eedb0881741ac14b006fd47029d0fb0ee8991ac4818888f3754f675b6b5" Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.805890 4961 scope.go:117] "RemoveContainer" containerID="c30c76abce6997829f28b2de31bf77bff2251ac788f322055a2b7520a5f19203" Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.810701 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5326dd56-9862-4aa7-b139-58d70055b703-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.876374 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9c6dad26-ad88-41f9-b656-86d08062a614" path="/var/lib/kubelet/pods/9c6dad26-ad88-41f9-b656-86d08062a614/volumes" Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.904119 4961 scope.go:117] "RemoveContainer" containerID="a1e7db1654a8f5dd5fdc1e3948bde7bafee3ff4f7ea4696d933c8f4cc4d967f2" Dec 05 17:49:02 crc kubenswrapper[4961]: E1205 17:49:02.904531 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a1e7db1654a8f5dd5fdc1e3948bde7bafee3ff4f7ea4696d933c8f4cc4d967f2\": container with ID starting with a1e7db1654a8f5dd5fdc1e3948bde7bafee3ff4f7ea4696d933c8f4cc4d967f2 not found: ID does not exist" containerID="a1e7db1654a8f5dd5fdc1e3948bde7bafee3ff4f7ea4696d933c8f4cc4d967f2" Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.904570 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a1e7db1654a8f5dd5fdc1e3948bde7bafee3ff4f7ea4696d933c8f4cc4d967f2"} err="failed to get container status \"a1e7db1654a8f5dd5fdc1e3948bde7bafee3ff4f7ea4696d933c8f4cc4d967f2\": rpc error: code = NotFound desc = could not find container \"a1e7db1654a8f5dd5fdc1e3948bde7bafee3ff4f7ea4696d933c8f4cc4d967f2\": container with ID starting with a1e7db1654a8f5dd5fdc1e3948bde7bafee3ff4f7ea4696d933c8f4cc4d967f2 not found: ID does not exist" Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.904593 4961 scope.go:117] "RemoveContainer" containerID="ee245eedb0881741ac14b006fd47029d0fb0ee8991ac4818888f3754f675b6b5" Dec 05 17:49:02 crc kubenswrapper[4961]: E1205 17:49:02.905261 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ee245eedb0881741ac14b006fd47029d0fb0ee8991ac4818888f3754f675b6b5\": container with ID starting with ee245eedb0881741ac14b006fd47029d0fb0ee8991ac4818888f3754f675b6b5 not found: ID does not exist" containerID="ee245eedb0881741ac14b006fd47029d0fb0ee8991ac4818888f3754f675b6b5" Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.905294 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee245eedb0881741ac14b006fd47029d0fb0ee8991ac4818888f3754f675b6b5"} err="failed to get container status \"ee245eedb0881741ac14b006fd47029d0fb0ee8991ac4818888f3754f675b6b5\": rpc error: code = NotFound desc = could not find container \"ee245eedb0881741ac14b006fd47029d0fb0ee8991ac4818888f3754f675b6b5\": container with ID starting with ee245eedb0881741ac14b006fd47029d0fb0ee8991ac4818888f3754f675b6b5 not found: ID does not exist" Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.905310 4961 scope.go:117] "RemoveContainer" 
containerID="c30c76abce6997829f28b2de31bf77bff2251ac788f322055a2b7520a5f19203" Dec 05 17:49:02 crc kubenswrapper[4961]: E1205 17:49:02.906189 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c30c76abce6997829f28b2de31bf77bff2251ac788f322055a2b7520a5f19203\": container with ID starting with c30c76abce6997829f28b2de31bf77bff2251ac788f322055a2b7520a5f19203 not found: ID does not exist" containerID="c30c76abce6997829f28b2de31bf77bff2251ac788f322055a2b7520a5f19203" Dec 05 17:49:02 crc kubenswrapper[4961]: I1205 17:49:02.906211 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c30c76abce6997829f28b2de31bf77bff2251ac788f322055a2b7520a5f19203"} err="failed to get container status \"c30c76abce6997829f28b2de31bf77bff2251ac788f322055a2b7520a5f19203\": rpc error: code = NotFound desc = could not find container \"c30c76abce6997829f28b2de31bf77bff2251ac788f322055a2b7520a5f19203\": container with ID starting with c30c76abce6997829f28b2de31bf77bff2251ac788f322055a2b7520a5f19203 not found: ID does not exist" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.003646 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-6q2jw"] Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.016259 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-6q2jw"] Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.027296 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 17:49:03 crc kubenswrapper[4961]: E1205 17:49:03.027590 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c6dad26-ad88-41f9-b656-86d08062a614" containerName="extract-content" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.027614 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c6dad26-ad88-41f9-b656-86d08062a614" containerName="extract-content" Dec 05 17:49:03 crc kubenswrapper[4961]: E1205 17:49:03.027634 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c6dad26-ad88-41f9-b656-86d08062a614" containerName="extract-utilities" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.027641 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c6dad26-ad88-41f9-b656-86d08062a614" containerName="extract-utilities" Dec 05 17:49:03 crc kubenswrapper[4961]: E1205 17:49:03.027658 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5326dd56-9862-4aa7-b139-58d70055b703" containerName="extract-content" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.027664 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="5326dd56-9862-4aa7-b139-58d70055b703" containerName="extract-content" Dec 05 17:49:03 crc kubenswrapper[4961]: E1205 17:49:03.027676 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9c6dad26-ad88-41f9-b656-86d08062a614" containerName="registry-server" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.027682 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="9c6dad26-ad88-41f9-b656-86d08062a614" containerName="registry-server" Dec 05 17:49:03 crc kubenswrapper[4961]: E1205 17:49:03.027693 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5326dd56-9862-4aa7-b139-58d70055b703" containerName="extract-utilities" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.027700 4961 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="5326dd56-9862-4aa7-b139-58d70055b703" containerName="extract-utilities" Dec 05 17:49:03 crc kubenswrapper[4961]: E1205 17:49:03.027714 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5326dd56-9862-4aa7-b139-58d70055b703" containerName="registry-server" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.027720 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="5326dd56-9862-4aa7-b139-58d70055b703" containerName="registry-server" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.027884 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="9c6dad26-ad88-41f9-b656-86d08062a614" containerName="registry-server" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.027913 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="5326dd56-9862-4aa7-b139-58d70055b703" containerName="registry-server" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.028688 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.032075 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-config-data" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.032483 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"galera-openstack-cell1-dockercfg-s94c4" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.032973 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-galera-openstack-cell1-svc" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.039379 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-cell1-scripts" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.056421 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.185522 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/memcached-0"] Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.195093 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/memcached-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.197269 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"memcached-memcached-dockercfg-j5xkv" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.197327 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"memcached-config-data" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.197274 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-memcached-svc" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.201658 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.219003 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ztcp5\" (UniqueName: \"kubernetes.io/projected/786a3535-1c16-4389-9239-49f6d349c3af-kube-api-access-ztcp5\") pod \"openstack-cell1-galera-0\" (UID: \"786a3535-1c16-4389-9239-49f6d349c3af\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.219133 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/786a3535-1c16-4389-9239-49f6d349c3af-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"786a3535-1c16-4389-9239-49f6d349c3af\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.219166 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/786a3535-1c16-4389-9239-49f6d349c3af-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"786a3535-1c16-4389-9239-49f6d349c3af\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.219197 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/786a3535-1c16-4389-9239-49f6d349c3af-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"786a3535-1c16-4389-9239-49f6d349c3af\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.219223 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/786a3535-1c16-4389-9239-49f6d349c3af-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"786a3535-1c16-4389-9239-49f6d349c3af\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.219251 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/786a3535-1c16-4389-9239-49f6d349c3af-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"786a3535-1c16-4389-9239-49f6d349c3af\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.219491 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/786a3535-1c16-4389-9239-49f6d349c3af-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"786a3535-1c16-4389-9239-49f6d349c3af\") " 
pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.219523 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-cell1-galera-0\" (UID: \"786a3535-1c16-4389-9239-49f6d349c3af\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.321066 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcwpw\" (UniqueName: \"kubernetes.io/projected/4c503456-1649-444c-a321-687b4294d2fa-kube-api-access-jcwpw\") pod \"memcached-0\" (UID: \"4c503456-1649-444c-a321-687b4294d2fa\") " pod="openstack/memcached-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.321148 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4c503456-1649-444c-a321-687b4294d2fa-kolla-config\") pod \"memcached-0\" (UID: \"4c503456-1649-444c-a321-687b4294d2fa\") " pod="openstack/memcached-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.321179 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/786a3535-1c16-4389-9239-49f6d349c3af-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"786a3535-1c16-4389-9239-49f6d349c3af\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.321266 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/786a3535-1c16-4389-9239-49f6d349c3af-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"786a3535-1c16-4389-9239-49f6d349c3af\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.321335 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/786a3535-1c16-4389-9239-49f6d349c3af-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"786a3535-1c16-4389-9239-49f6d349c3af\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.321376 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/786a3535-1c16-4389-9239-49f6d349c3af-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"786a3535-1c16-4389-9239-49f6d349c3af\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.321410 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c503456-1649-444c-a321-687b4294d2fa-memcached-tls-certs\") pod \"memcached-0\" (UID: \"4c503456-1649-444c-a321-687b4294d2fa\") " pod="openstack/memcached-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.321431 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c503456-1649-444c-a321-687b4294d2fa-combined-ca-bundle\") pod \"memcached-0\" (UID: \"4c503456-1649-444c-a321-687b4294d2fa\") " pod="openstack/memcached-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.321477 4961 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/786a3535-1c16-4389-9239-49f6d349c3af-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"786a3535-1c16-4389-9239-49f6d349c3af\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.321645 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4c503456-1649-444c-a321-687b4294d2fa-config-data\") pod \"memcached-0\" (UID: \"4c503456-1649-444c-a321-687b4294d2fa\") " pod="openstack/memcached-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.321703 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/786a3535-1c16-4389-9239-49f6d349c3af-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"786a3535-1c16-4389-9239-49f6d349c3af\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.321733 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-cell1-galera-0\" (UID: \"786a3535-1c16-4389-9239-49f6d349c3af\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.321806 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ztcp5\" (UniqueName: \"kubernetes.io/projected/786a3535-1c16-4389-9239-49f6d349c3af-kube-api-access-ztcp5\") pod \"openstack-cell1-galera-0\" (UID: \"786a3535-1c16-4389-9239-49f6d349c3af\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.322012 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/786a3535-1c16-4389-9239-49f6d349c3af-kolla-config\") pod \"openstack-cell1-galera-0\" (UID: \"786a3535-1c16-4389-9239-49f6d349c3af\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.322437 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/786a3535-1c16-4389-9239-49f6d349c3af-config-data-default\") pod \"openstack-cell1-galera-0\" (UID: \"786a3535-1c16-4389-9239-49f6d349c3af\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.322507 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/786a3535-1c16-4389-9239-49f6d349c3af-operator-scripts\") pod \"openstack-cell1-galera-0\" (UID: \"786a3535-1c16-4389-9239-49f6d349c3af\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.322561 4961 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-cell1-galera-0\" (UID: \"786a3535-1c16-4389-9239-49f6d349c3af\") device mount path \"/mnt/openstack/pv11\"" pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.324004 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/786a3535-1c16-4389-9239-49f6d349c3af-config-data-generated\") pod \"openstack-cell1-galera-0\" (UID: \"786a3535-1c16-4389-9239-49f6d349c3af\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.326629 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"galera-tls-certs\" (UniqueName: \"kubernetes.io/secret/786a3535-1c16-4389-9239-49f6d349c3af-galera-tls-certs\") pod \"openstack-cell1-galera-0\" (UID: \"786a3535-1c16-4389-9239-49f6d349c3af\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.339725 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/786a3535-1c16-4389-9239-49f6d349c3af-combined-ca-bundle\") pod \"openstack-cell1-galera-0\" (UID: \"786a3535-1c16-4389-9239-49f6d349c3af\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.354293 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ztcp5\" (UniqueName: \"kubernetes.io/projected/786a3535-1c16-4389-9239-49f6d349c3af-kube-api-access-ztcp5\") pod \"openstack-cell1-galera-0\" (UID: \"786a3535-1c16-4389-9239-49f6d349c3af\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.361279 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-cell1-galera-0\" (UID: \"786a3535-1c16-4389-9239-49f6d349c3af\") " pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.400301 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstack-cell1-galera-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.423514 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcwpw\" (UniqueName: \"kubernetes.io/projected/4c503456-1649-444c-a321-687b4294d2fa-kube-api-access-jcwpw\") pod \"memcached-0\" (UID: \"4c503456-1649-444c-a321-687b4294d2fa\") " pod="openstack/memcached-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.423579 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4c503456-1649-444c-a321-687b4294d2fa-kolla-config\") pod \"memcached-0\" (UID: \"4c503456-1649-444c-a321-687b4294d2fa\") " pod="openstack/memcached-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.423617 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c503456-1649-444c-a321-687b4294d2fa-memcached-tls-certs\") pod \"memcached-0\" (UID: \"4c503456-1649-444c-a321-687b4294d2fa\") " pod="openstack/memcached-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.423636 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c503456-1649-444c-a321-687b4294d2fa-combined-ca-bundle\") pod \"memcached-0\" (UID: \"4c503456-1649-444c-a321-687b4294d2fa\") " pod="openstack/memcached-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.423687 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4c503456-1649-444c-a321-687b4294d2fa-config-data\") pod \"memcached-0\" (UID: \"4c503456-1649-444c-a321-687b4294d2fa\") " pod="openstack/memcached-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.424454 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/4c503456-1649-444c-a321-687b4294d2fa-config-data\") pod \"memcached-0\" (UID: \"4c503456-1649-444c-a321-687b4294d2fa\") " pod="openstack/memcached-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.425097 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/4c503456-1649-444c-a321-687b4294d2fa-kolla-config\") pod \"memcached-0\" (UID: \"4c503456-1649-444c-a321-687b4294d2fa\") " pod="openstack/memcached-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.428398 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4c503456-1649-444c-a321-687b4294d2fa-combined-ca-bundle\") pod \"memcached-0\" (UID: \"4c503456-1649-444c-a321-687b4294d2fa\") " pod="openstack/memcached-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.455127 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcwpw\" (UniqueName: \"kubernetes.io/projected/4c503456-1649-444c-a321-687b4294d2fa-kube-api-access-jcwpw\") pod \"memcached-0\" (UID: \"4c503456-1649-444c-a321-687b4294d2fa\") " pod="openstack/memcached-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.455807 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memcached-tls-certs\" (UniqueName: \"kubernetes.io/secret/4c503456-1649-444c-a321-687b4294d2fa-memcached-tls-certs\") pod \"memcached-0\" (UID: 
\"4c503456-1649-444c-a321-687b4294d2fa\") " pod="openstack/memcached-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.527915 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/memcached-0" Dec 05 17:49:03 crc kubenswrapper[4961]: I1205 17:49:03.728493 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"78297e26-2e01-4bb1-8f35-c96861dfda09","Type":"ContainerStarted","Data":"6c0b3d182d65b290244888b18b8dc235e429ba2c3bbf5f8fa79e36f4d81d6ac4"} Dec 05 17:49:04 crc kubenswrapper[4961]: I1205 17:49:04.009863 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstack-cell1-galera-0"] Dec 05 17:49:04 crc kubenswrapper[4961]: I1205 17:49:04.130395 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/memcached-0"] Dec 05 17:49:04 crc kubenswrapper[4961]: I1205 17:49:04.800830 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"786a3535-1c16-4389-9239-49f6d349c3af","Type":"ContainerStarted","Data":"1220aad614b1ffe78f4ee6b88448915139404f3ad225c509291d37c609afafbd"} Dec 05 17:49:04 crc kubenswrapper[4961]: I1205 17:49:04.802457 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" event={"ID":"4c503456-1649-444c-a321-687b4294d2fa","Type":"ContainerStarted","Data":"ff2ec0e5878147ec4f133535c300c33ec1bc5f9abe5cfaf39553c73d212397cd"} Dec 05 17:49:04 crc kubenswrapper[4961]: I1205 17:49:04.889876 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5326dd56-9862-4aa7-b139-58d70055b703" path="/var/lib/kubelet/pods/5326dd56-9862-4aa7-b139-58d70055b703/volumes" Dec 05 17:49:05 crc kubenswrapper[4961]: I1205 17:49:05.193573 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 17:49:05 crc kubenswrapper[4961]: I1205 17:49:05.194629 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 17:49:05 crc kubenswrapper[4961]: I1205 17:49:05.198269 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"telemetry-ceilometer-dockercfg-tqjzp" Dec 05 17:49:05 crc kubenswrapper[4961]: I1205 17:49:05.213625 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 17:49:05 crc kubenswrapper[4961]: I1205 17:49:05.267638 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7jfzd\" (UniqueName: \"kubernetes.io/projected/5123c9de-dcdc-4189-8bad-330610afddd3-kube-api-access-7jfzd\") pod \"kube-state-metrics-0\" (UID: \"5123c9de-dcdc-4189-8bad-330610afddd3\") " pod="openstack/kube-state-metrics-0" Dec 05 17:49:05 crc kubenswrapper[4961]: I1205 17:49:05.369106 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7jfzd\" (UniqueName: \"kubernetes.io/projected/5123c9de-dcdc-4189-8bad-330610afddd3-kube-api-access-7jfzd\") pod \"kube-state-metrics-0\" (UID: \"5123c9de-dcdc-4189-8bad-330610afddd3\") " pod="openstack/kube-state-metrics-0" Dec 05 17:49:05 crc kubenswrapper[4961]: I1205 17:49:05.389636 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7jfzd\" (UniqueName: \"kubernetes.io/projected/5123c9de-dcdc-4189-8bad-330610afddd3-kube-api-access-7jfzd\") pod \"kube-state-metrics-0\" (UID: \"5123c9de-dcdc-4189-8bad-330610afddd3\") " pod="openstack/kube-state-metrics-0" Dec 05 17:49:05 crc kubenswrapper[4961]: I1205 17:49:05.536005 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 17:49:06 crc kubenswrapper[4961]: I1205 17:49:06.212668 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 17:49:06 crc kubenswrapper[4961]: W1205 17:49:06.234505 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5123c9de_dcdc_4189_8bad_330610afddd3.slice/crio-d48eff7bd430a8d53a0ade02089eab30f6398f879b082e1a458caf261f4fda7b WatchSource:0}: Error finding container d48eff7bd430a8d53a0ade02089eab30f6398f879b082e1a458caf261f4fda7b: Status 404 returned error can't find the container with id d48eff7bd430a8d53a0ade02089eab30f6398f879b082e1a458caf261f4fda7b Dec 05 17:49:06 crc kubenswrapper[4961]: I1205 17:49:06.818190 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5123c9de-dcdc-4189-8bad-330610afddd3","Type":"ContainerStarted","Data":"d48eff7bd430a8d53a0ade02089eab30f6398f879b082e1a458caf261f4fda7b"} Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.621968 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ldph7"] Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.623664 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ldph7" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.628510 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-scripts" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.628618 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncontroller-ovncontroller-dockercfg-tq22p" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.630120 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovncontroller-ovndbs" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.632892 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ldph7"] Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.674407 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ovs-c9ff2"] Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.676846 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-c9ff2" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.689930 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-c9ff2"] Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.701278 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6b6b1e99-e081-4c93-8fe8-c693eb7a0205-var-run-ovn\") pod \"ovn-controller-ldph7\" (UID: \"6b6b1e99-e081-4c93-8fe8-c693eb7a0205\") " pod="openstack/ovn-controller-ldph7" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.701343 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b6b1e99-e081-4c93-8fe8-c693eb7a0205-combined-ca-bundle\") pod \"ovn-controller-ldph7\" (UID: \"6b6b1e99-e081-4c93-8fe8-c693eb7a0205\") " pod="openstack/ovn-controller-ldph7" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.701366 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l8qc6\" (UniqueName: \"kubernetes.io/projected/6b6b1e99-e081-4c93-8fe8-c693eb7a0205-kube-api-access-l8qc6\") pod \"ovn-controller-ldph7\" (UID: \"6b6b1e99-e081-4c93-8fe8-c693eb7a0205\") " pod="openstack/ovn-controller-ldph7" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.701388 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b6b1e99-e081-4c93-8fe8-c693eb7a0205-ovn-controller-tls-certs\") pod \"ovn-controller-ldph7\" (UID: \"6b6b1e99-e081-4c93-8fe8-c693eb7a0205\") " pod="openstack/ovn-controller-ldph7" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.701413 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6b6b1e99-e081-4c93-8fe8-c693eb7a0205-scripts\") pod \"ovn-controller-ldph7\" (UID: \"6b6b1e99-e081-4c93-8fe8-c693eb7a0205\") " pod="openstack/ovn-controller-ldph7" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.701467 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6b6b1e99-e081-4c93-8fe8-c693eb7a0205-var-run\") pod \"ovn-controller-ldph7\" (UID: 
\"6b6b1e99-e081-4c93-8fe8-c693eb7a0205\") " pod="openstack/ovn-controller-ldph7" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.701489 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6b6b1e99-e081-4c93-8fe8-c693eb7a0205-var-log-ovn\") pod \"ovn-controller-ldph7\" (UID: \"6b6b1e99-e081-4c93-8fe8-c693eb7a0205\") " pod="openstack/ovn-controller-ldph7" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.803003 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/db7a99f8-4e0e-408b-9b96-39340c35d4d8-var-run\") pod \"ovn-controller-ovs-c9ff2\" (UID: \"db7a99f8-4e0e-408b-9b96-39340c35d4d8\") " pod="openstack/ovn-controller-ovs-c9ff2" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.803043 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/db7a99f8-4e0e-408b-9b96-39340c35d4d8-var-log\") pod \"ovn-controller-ovs-c9ff2\" (UID: \"db7a99f8-4e0e-408b-9b96-39340c35d4d8\") " pod="openstack/ovn-controller-ovs-c9ff2" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.803073 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/db7a99f8-4e0e-408b-9b96-39340c35d4d8-var-lib\") pod \"ovn-controller-ovs-c9ff2\" (UID: \"db7a99f8-4e0e-408b-9b96-39340c35d4d8\") " pod="openstack/ovn-controller-ovs-c9ff2" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.803098 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6b6b1e99-e081-4c93-8fe8-c693eb7a0205-scripts\") pod \"ovn-controller-ldph7\" (UID: \"6b6b1e99-e081-4c93-8fe8-c693eb7a0205\") " pod="openstack/ovn-controller-ldph7" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.803227 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bxg6\" (UniqueName: \"kubernetes.io/projected/db7a99f8-4e0e-408b-9b96-39340c35d4d8-kube-api-access-2bxg6\") pod \"ovn-controller-ovs-c9ff2\" (UID: \"db7a99f8-4e0e-408b-9b96-39340c35d4d8\") " pod="openstack/ovn-controller-ovs-c9ff2" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.803342 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6b6b1e99-e081-4c93-8fe8-c693eb7a0205-var-run\") pod \"ovn-controller-ldph7\" (UID: \"6b6b1e99-e081-4c93-8fe8-c693eb7a0205\") " pod="openstack/ovn-controller-ldph7" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.803383 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/db7a99f8-4e0e-408b-9b96-39340c35d4d8-etc-ovs\") pod \"ovn-controller-ovs-c9ff2\" (UID: \"db7a99f8-4e0e-408b-9b96-39340c35d4d8\") " pod="openstack/ovn-controller-ovs-c9ff2" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.803403 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6b6b1e99-e081-4c93-8fe8-c693eb7a0205-var-log-ovn\") pod \"ovn-controller-ldph7\" (UID: \"6b6b1e99-e081-4c93-8fe8-c693eb7a0205\") " pod="openstack/ovn-controller-ldph7" Dec 05 17:49:09 crc kubenswrapper[4961]: 
I1205 17:49:09.803464 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6b6b1e99-e081-4c93-8fe8-c693eb7a0205-var-run-ovn\") pod \"ovn-controller-ldph7\" (UID: \"6b6b1e99-e081-4c93-8fe8-c693eb7a0205\") " pod="openstack/ovn-controller-ldph7" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.803535 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db7a99f8-4e0e-408b-9b96-39340c35d4d8-scripts\") pod \"ovn-controller-ovs-c9ff2\" (UID: \"db7a99f8-4e0e-408b-9b96-39340c35d4d8\") " pod="openstack/ovn-controller-ovs-c9ff2" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.803867 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b6b1e99-e081-4c93-8fe8-c693eb7a0205-combined-ca-bundle\") pod \"ovn-controller-ldph7\" (UID: \"6b6b1e99-e081-4c93-8fe8-c693eb7a0205\") " pod="openstack/ovn-controller-ldph7" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.803899 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l8qc6\" (UniqueName: \"kubernetes.io/projected/6b6b1e99-e081-4c93-8fe8-c693eb7a0205-kube-api-access-l8qc6\") pod \"ovn-controller-ldph7\" (UID: \"6b6b1e99-e081-4c93-8fe8-c693eb7a0205\") " pod="openstack/ovn-controller-ldph7" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.804099 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b6b1e99-e081-4c93-8fe8-c693eb7a0205-ovn-controller-tls-certs\") pod \"ovn-controller-ldph7\" (UID: \"6b6b1e99-e081-4c93-8fe8-c693eb7a0205\") " pod="openstack/ovn-controller-ldph7" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.804867 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/6b6b1e99-e081-4c93-8fe8-c693eb7a0205-var-run-ovn\") pod \"ovn-controller-ldph7\" (UID: \"6b6b1e99-e081-4c93-8fe8-c693eb7a0205\") " pod="openstack/ovn-controller-ldph7" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.805612 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/6b6b1e99-e081-4c93-8fe8-c693eb7a0205-var-run\") pod \"ovn-controller-ldph7\" (UID: \"6b6b1e99-e081-4c93-8fe8-c693eb7a0205\") " pod="openstack/ovn-controller-ldph7" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.805869 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/6b6b1e99-e081-4c93-8fe8-c693eb7a0205-var-log-ovn\") pod \"ovn-controller-ldph7\" (UID: \"6b6b1e99-e081-4c93-8fe8-c693eb7a0205\") " pod="openstack/ovn-controller-ldph7" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.807252 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/6b6b1e99-e081-4c93-8fe8-c693eb7a0205-scripts\") pod \"ovn-controller-ldph7\" (UID: \"6b6b1e99-e081-4c93-8fe8-c693eb7a0205\") " pod="openstack/ovn-controller-ldph7" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.818565 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6b6b1e99-e081-4c93-8fe8-c693eb7a0205-combined-ca-bundle\") pod 
\"ovn-controller-ldph7\" (UID: \"6b6b1e99-e081-4c93-8fe8-c693eb7a0205\") " pod="openstack/ovn-controller-ldph7" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.819033 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-controller-tls-certs\" (UniqueName: \"kubernetes.io/secret/6b6b1e99-e081-4c93-8fe8-c693eb7a0205-ovn-controller-tls-certs\") pod \"ovn-controller-ldph7\" (UID: \"6b6b1e99-e081-4c93-8fe8-c693eb7a0205\") " pod="openstack/ovn-controller-ldph7" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.821887 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l8qc6\" (UniqueName: \"kubernetes.io/projected/6b6b1e99-e081-4c93-8fe8-c693eb7a0205-kube-api-access-l8qc6\") pod \"ovn-controller-ldph7\" (UID: \"6b6b1e99-e081-4c93-8fe8-c693eb7a0205\") " pod="openstack/ovn-controller-ldph7" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.905966 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db7a99f8-4e0e-408b-9b96-39340c35d4d8-scripts\") pod \"ovn-controller-ovs-c9ff2\" (UID: \"db7a99f8-4e0e-408b-9b96-39340c35d4d8\") " pod="openstack/ovn-controller-ovs-c9ff2" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.906054 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/db7a99f8-4e0e-408b-9b96-39340c35d4d8-var-log\") pod \"ovn-controller-ovs-c9ff2\" (UID: \"db7a99f8-4e0e-408b-9b96-39340c35d4d8\") " pod="openstack/ovn-controller-ovs-c9ff2" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.906109 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/db7a99f8-4e0e-408b-9b96-39340c35d4d8-var-run\") pod \"ovn-controller-ovs-c9ff2\" (UID: \"db7a99f8-4e0e-408b-9b96-39340c35d4d8\") " pod="openstack/ovn-controller-ovs-c9ff2" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.906134 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/db7a99f8-4e0e-408b-9b96-39340c35d4d8-var-lib\") pod \"ovn-controller-ovs-c9ff2\" (UID: \"db7a99f8-4e0e-408b-9b96-39340c35d4d8\") " pod="openstack/ovn-controller-ovs-c9ff2" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.906194 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bxg6\" (UniqueName: \"kubernetes.io/projected/db7a99f8-4e0e-408b-9b96-39340c35d4d8-kube-api-access-2bxg6\") pod \"ovn-controller-ovs-c9ff2\" (UID: \"db7a99f8-4e0e-408b-9b96-39340c35d4d8\") " pod="openstack/ovn-controller-ovs-c9ff2" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.906240 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/db7a99f8-4e0e-408b-9b96-39340c35d4d8-etc-ovs\") pod \"ovn-controller-ovs-c9ff2\" (UID: \"db7a99f8-4e0e-408b-9b96-39340c35d4d8\") " pod="openstack/ovn-controller-ovs-c9ff2" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.906689 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/db7a99f8-4e0e-408b-9b96-39340c35d4d8-var-run\") pod \"ovn-controller-ovs-c9ff2\" (UID: \"db7a99f8-4e0e-408b-9b96-39340c35d4d8\") " pod="openstack/ovn-controller-ovs-c9ff2" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.907153 4961 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib\" (UniqueName: \"kubernetes.io/host-path/db7a99f8-4e0e-408b-9b96-39340c35d4d8-var-lib\") pod \"ovn-controller-ovs-c9ff2\" (UID: \"db7a99f8-4e0e-408b-9b96-39340c35d4d8\") " pod="openstack/ovn-controller-ovs-c9ff2" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.907280 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ovs\" (UniqueName: \"kubernetes.io/host-path/db7a99f8-4e0e-408b-9b96-39340c35d4d8-etc-ovs\") pod \"ovn-controller-ovs-c9ff2\" (UID: \"db7a99f8-4e0e-408b-9b96-39340c35d4d8\") " pod="openstack/ovn-controller-ovs-c9ff2" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.907348 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/db7a99f8-4e0e-408b-9b96-39340c35d4d8-var-log\") pod \"ovn-controller-ovs-c9ff2\" (UID: \"db7a99f8-4e0e-408b-9b96-39340c35d4d8\") " pod="openstack/ovn-controller-ovs-c9ff2" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.913498 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/db7a99f8-4e0e-408b-9b96-39340c35d4d8-scripts\") pod \"ovn-controller-ovs-c9ff2\" (UID: \"db7a99f8-4e0e-408b-9b96-39340c35d4d8\") " pod="openstack/ovn-controller-ovs-c9ff2" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.928304 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bxg6\" (UniqueName: \"kubernetes.io/projected/db7a99f8-4e0e-408b-9b96-39340c35d4d8-kube-api-access-2bxg6\") pod \"ovn-controller-ovs-c9ff2\" (UID: \"db7a99f8-4e0e-408b-9b96-39340c35d4d8\") " pod="openstack/ovn-controller-ovs-c9ff2" Dec 05 17:49:09 crc kubenswrapper[4961]: I1205 17:49:09.966434 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ldph7" Dec 05 17:49:10 crc kubenswrapper[4961]: I1205 17:49:10.012279 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ovs-c9ff2" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.375540 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.380844 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.385900 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-sb-ovndbs" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.386146 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovn-metrics" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.386167 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-sb-dockercfg-phfc6" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.386152 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-config" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.386563 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-sb-scripts" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.393187 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.458151 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"1603a1ba-53e0-4707-a222-392195709f98\") " pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.458235 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pqsnn\" (UniqueName: \"kubernetes.io/projected/1603a1ba-53e0-4707-a222-392195709f98-kube-api-access-pqsnn\") pod \"ovsdbserver-sb-0\" (UID: \"1603a1ba-53e0-4707-a222-392195709f98\") " pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.458283 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1603a1ba-53e0-4707-a222-392195709f98-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1603a1ba-53e0-4707-a222-392195709f98\") " pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.458328 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1603a1ba-53e0-4707-a222-392195709f98-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"1603a1ba-53e0-4707-a222-392195709f98\") " pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.458374 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1603a1ba-53e0-4707-a222-392195709f98-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"1603a1ba-53e0-4707-a222-392195709f98\") " pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.458416 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1603a1ba-53e0-4707-a222-392195709f98-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"1603a1ba-53e0-4707-a222-392195709f98\") " pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.458447 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/1603a1ba-53e0-4707-a222-392195709f98-config\") pod \"ovsdbserver-sb-0\" (UID: \"1603a1ba-53e0-4707-a222-392195709f98\") " pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.458466 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1603a1ba-53e0-4707-a222-392195709f98-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1603a1ba-53e0-4707-a222-392195709f98\") " pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.559358 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1603a1ba-53e0-4707-a222-392195709f98-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"1603a1ba-53e0-4707-a222-392195709f98\") " pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.559427 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1603a1ba-53e0-4707-a222-392195709f98-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"1603a1ba-53e0-4707-a222-392195709f98\") " pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.559451 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1603a1ba-53e0-4707-a222-392195709f98-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1603a1ba-53e0-4707-a222-392195709f98\") " pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.559467 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1603a1ba-53e0-4707-a222-392195709f98-config\") pod \"ovsdbserver-sb-0\" (UID: \"1603a1ba-53e0-4707-a222-392195709f98\") " pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.559503 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"1603a1ba-53e0-4707-a222-392195709f98\") " pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.559530 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pqsnn\" (UniqueName: \"kubernetes.io/projected/1603a1ba-53e0-4707-a222-392195709f98-kube-api-access-pqsnn\") pod \"ovsdbserver-sb-0\" (UID: \"1603a1ba-53e0-4707-a222-392195709f98\") " pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.559559 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1603a1ba-53e0-4707-a222-392195709f98-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1603a1ba-53e0-4707-a222-392195709f98\") " pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.559591 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1603a1ba-53e0-4707-a222-392195709f98-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"1603a1ba-53e0-4707-a222-392195709f98\") " pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 
17:49:12.560239 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/1603a1ba-53e0-4707-a222-392195709f98-ovsdb-rundir\") pod \"ovsdbserver-sb-0\" (UID: \"1603a1ba-53e0-4707-a222-392195709f98\") " pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.560374 4961 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"1603a1ba-53e0-4707-a222-392195709f98\") device mount path \"/mnt/openstack/pv05\"" pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.560958 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1603a1ba-53e0-4707-a222-392195709f98-scripts\") pod \"ovsdbserver-sb-0\" (UID: \"1603a1ba-53e0-4707-a222-392195709f98\") " pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.561294 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1603a1ba-53e0-4707-a222-392195709f98-config\") pod \"ovsdbserver-sb-0\" (UID: \"1603a1ba-53e0-4707-a222-392195709f98\") " pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.567548 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb-tls-certs\" (UniqueName: \"kubernetes.io/secret/1603a1ba-53e0-4707-a222-392195709f98-ovsdbserver-sb-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1603a1ba-53e0-4707-a222-392195709f98\") " pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.568059 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/1603a1ba-53e0-4707-a222-392195709f98-metrics-certs-tls-certs\") pod \"ovsdbserver-sb-0\" (UID: \"1603a1ba-53e0-4707-a222-392195709f98\") " pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.568911 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1603a1ba-53e0-4707-a222-392195709f98-combined-ca-bundle\") pod \"ovsdbserver-sb-0\" (UID: \"1603a1ba-53e0-4707-a222-392195709f98\") " pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.588020 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage05-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage05-crc\") pod \"ovsdbserver-sb-0\" (UID: \"1603a1ba-53e0-4707-a222-392195709f98\") " pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.594887 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pqsnn\" (UniqueName: \"kubernetes.io/projected/1603a1ba-53e0-4707-a222-392195709f98-kube-api-access-pqsnn\") pod \"ovsdbserver-sb-0\" (UID: \"1603a1ba-53e0-4707-a222-392195709f98\") " pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.719491 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.959642 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.961114 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.963158 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovncluster-ovndbcluster-nb-dockercfg-w9qbw" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.963498 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-config" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.967567 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovndbcluster-nb-scripts" Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.970538 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 17:49:12 crc kubenswrapper[4961]: I1205 17:49:12.971386 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovndbcluster-nb-ovndbs" Dec 05 17:49:13 crc kubenswrapper[4961]: I1205 17:49:13.065663 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d\") " pod="openstack/ovsdbserver-nb-0" Dec 05 17:49:13 crc kubenswrapper[4961]: I1205 17:49:13.065709 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38c3904c-e6f1-4d83-bfbc-d5c39f52d67d-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d\") " pod="openstack/ovsdbserver-nb-0" Dec 05 17:49:13 crc kubenswrapper[4961]: I1205 17:49:13.065740 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vmd6f\" (UniqueName: \"kubernetes.io/projected/38c3904c-e6f1-4d83-bfbc-d5c39f52d67d-kube-api-access-vmd6f\") pod \"ovsdbserver-nb-0\" (UID: \"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d\") " pod="openstack/ovsdbserver-nb-0" Dec 05 17:49:13 crc kubenswrapper[4961]: I1205 17:49:13.065800 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38c3904c-e6f1-4d83-bfbc-d5c39f52d67d-config\") pod \"ovsdbserver-nb-0\" (UID: \"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d\") " pod="openstack/ovsdbserver-nb-0" Dec 05 17:49:13 crc kubenswrapper[4961]: I1205 17:49:13.065838 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/38c3904c-e6f1-4d83-bfbc-d5c39f52d67d-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d\") " pod="openstack/ovsdbserver-nb-0" Dec 05 17:49:13 crc kubenswrapper[4961]: I1205 17:49:13.065971 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/38c3904c-e6f1-4d83-bfbc-d5c39f52d67d-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d\") " pod="openstack/ovsdbserver-nb-0" Dec 05 
17:49:13 crc kubenswrapper[4961]: I1205 17:49:13.066010 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/38c3904c-e6f1-4d83-bfbc-d5c39f52d67d-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d\") " pod="openstack/ovsdbserver-nb-0" Dec 05 17:49:13 crc kubenswrapper[4961]: I1205 17:49:13.066050 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/38c3904c-e6f1-4d83-bfbc-d5c39f52d67d-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d\") " pod="openstack/ovsdbserver-nb-0" Dec 05 17:49:13 crc kubenswrapper[4961]: I1205 17:49:13.167094 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d\") " pod="openstack/ovsdbserver-nb-0" Dec 05 17:49:13 crc kubenswrapper[4961]: I1205 17:49:13.167142 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38c3904c-e6f1-4d83-bfbc-d5c39f52d67d-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d\") " pod="openstack/ovsdbserver-nb-0" Dec 05 17:49:13 crc kubenswrapper[4961]: I1205 17:49:13.167175 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vmd6f\" (UniqueName: \"kubernetes.io/projected/38c3904c-e6f1-4d83-bfbc-d5c39f52d67d-kube-api-access-vmd6f\") pod \"ovsdbserver-nb-0\" (UID: \"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d\") " pod="openstack/ovsdbserver-nb-0" Dec 05 17:49:13 crc kubenswrapper[4961]: I1205 17:49:13.167207 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38c3904c-e6f1-4d83-bfbc-d5c39f52d67d-config\") pod \"ovsdbserver-nb-0\" (UID: \"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d\") " pod="openstack/ovsdbserver-nb-0" Dec 05 17:49:13 crc kubenswrapper[4961]: I1205 17:49:13.167240 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/38c3904c-e6f1-4d83-bfbc-d5c39f52d67d-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d\") " pod="openstack/ovsdbserver-nb-0" Dec 05 17:49:13 crc kubenswrapper[4961]: I1205 17:49:13.167268 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/38c3904c-e6f1-4d83-bfbc-d5c39f52d67d-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d\") " pod="openstack/ovsdbserver-nb-0" Dec 05 17:49:13 crc kubenswrapper[4961]: I1205 17:49:13.167293 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/38c3904c-e6f1-4d83-bfbc-d5c39f52d67d-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d\") " pod="openstack/ovsdbserver-nb-0" Dec 05 17:49:13 crc kubenswrapper[4961]: I1205 17:49:13.167322 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/38c3904c-e6f1-4d83-bfbc-d5c39f52d67d-scripts\") pod 
\"ovsdbserver-nb-0\" (UID: \"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d\") " pod="openstack/ovsdbserver-nb-0" Dec 05 17:49:13 crc kubenswrapper[4961]: I1205 17:49:13.167888 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdb-rundir\" (UniqueName: \"kubernetes.io/empty-dir/38c3904c-e6f1-4d83-bfbc-d5c39f52d67d-ovsdb-rundir\") pod \"ovsdbserver-nb-0\" (UID: \"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d\") " pod="openstack/ovsdbserver-nb-0" Dec 05 17:49:13 crc kubenswrapper[4961]: I1205 17:49:13.168073 4961 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d\") device mount path \"/mnt/openstack/pv06\"" pod="openstack/ovsdbserver-nb-0" Dec 05 17:49:13 crc kubenswrapper[4961]: I1205 17:49:13.168530 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/38c3904c-e6f1-4d83-bfbc-d5c39f52d67d-config\") pod \"ovsdbserver-nb-0\" (UID: \"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d\") " pod="openstack/ovsdbserver-nb-0" Dec 05 17:49:13 crc kubenswrapper[4961]: I1205 17:49:13.168576 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/38c3904c-e6f1-4d83-bfbc-d5c39f52d67d-scripts\") pod \"ovsdbserver-nb-0\" (UID: \"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d\") " pod="openstack/ovsdbserver-nb-0" Dec 05 17:49:13 crc kubenswrapper[4961]: I1205 17:49:13.171725 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/38c3904c-e6f1-4d83-bfbc-d5c39f52d67d-combined-ca-bundle\") pod \"ovsdbserver-nb-0\" (UID: \"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d\") " pod="openstack/ovsdbserver-nb-0" Dec 05 17:49:13 crc kubenswrapper[4961]: I1205 17:49:13.171806 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb-tls-certs\" (UniqueName: \"kubernetes.io/secret/38c3904c-e6f1-4d83-bfbc-d5c39f52d67d-ovsdbserver-nb-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d\") " pod="openstack/ovsdbserver-nb-0" Dec 05 17:49:13 crc kubenswrapper[4961]: I1205 17:49:13.171830 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/38c3904c-e6f1-4d83-bfbc-d5c39f52d67d-metrics-certs-tls-certs\") pod \"ovsdbserver-nb-0\" (UID: \"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d\") " pod="openstack/ovsdbserver-nb-0" Dec 05 17:49:13 crc kubenswrapper[4961]: I1205 17:49:13.184029 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vmd6f\" (UniqueName: \"kubernetes.io/projected/38c3904c-e6f1-4d83-bfbc-d5c39f52d67d-kube-api-access-vmd6f\") pod \"ovsdbserver-nb-0\" (UID: \"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d\") " pod="openstack/ovsdbserver-nb-0" Dec 05 17:49:13 crc kubenswrapper[4961]: I1205 17:49:13.191440 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"ovsdbserver-nb-0\" (UID: \"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d\") " pod="openstack/ovsdbserver-nb-0" Dec 05 17:49:13 crc kubenswrapper[4961]: I1205 17:49:13.276361 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovsdbserver-nb-0" Dec 05 17:49:23 crc kubenswrapper[4961]: E1205 17:49:23.592315 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Dec 05 17:49:23 crc kubenswrapper[4961]: E1205 17:49:23.592884 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash /var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-x9f5l,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-galera-0_openstack(78297e26-2e01-4bb1-8f35-c96861dfda09): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:49:23 crc kubenswrapper[4961]: E1205 17:49:23.594041 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-galera-0" podUID="78297e26-2e01-4bb1-8f35-c96861dfda09" Dec 05 17:49:23 crc kubenswrapper[4961]: E1205 17:49:23.993267 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-galera-0" podUID="78297e26-2e01-4bb1-8f35-c96861dfda09" Dec 05 17:49:27 crc kubenswrapper[4961]: I1205 17:49:27.246031 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 
container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:49:27 crc kubenswrapper[4961]: I1205 17:49:27.246374 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:49:31 crc kubenswrapper[4961]: E1205 17:49:31.819916 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-memcached:current-podified" Dec 05 17:49:31 crc kubenswrapper[4961]: E1205 17:49:31.820701 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:memcached,Image:quay.io/podified-antelope-centos9/openstack-memcached:current-podified,Command:[/usr/bin/dumb-init -- /usr/local/bin/kolla_start],Args:[],WorkingDir:,Ports:[]ContainerPort{ContainerPort{Name:memcached,HostPort:0,ContainerPort:11211,Protocol:TCP,HostIP:,},ContainerPort{Name:memcached-tls,HostPort:0,ContainerPort:11212,Protocol:TCP,HostIP:,},},Env:[]EnvVar{EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:POD_IPS,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIPs,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},EnvVar{Name:CONFIG_HASH,Value:n7chfdh5c6h55fh66fh84h67bh5c7h5c5h67dh677hf4h5fbh8fh64h66bh64ch66bhd8h575h5ddh5h646hbbh74h558h74h66fh5dchd7h96h645q,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/src,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/certs/memcached.crt,SubPath:tls.crt,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:memcached-tls-certs,ReadOnly:true,MountPath:/var/lib/config-data/tls/private/memcached.key,SubPath:tls.key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-jcwpw,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 },Host:,},GRPC:nil,},InitialDelaySeconds:3,TimeoutSeconds:5,PeriodSeconds:3,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:nil,HTTPGet:nil,TCPSocket:&TCPSocketAction{Port:{0 11211 
},Host:,},GRPC:nil,},InitialDelaySeconds:5,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42457,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42457,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod memcached-0_openstack(4c503456-1649-444c-a321-687b4294d2fa): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:49:31 crc kubenswrapper[4961]: E1205 17:49:31.821940 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/memcached-0" podUID="4c503456-1649-444c-a321-687b4294d2fa" Dec 05 17:49:32 crc kubenswrapper[4961]: E1205 17:49:32.056419 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"memcached\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-memcached:current-podified\\\"\"" pod="openstack/memcached-0" podUID="4c503456-1649-444c-a321-687b4294d2fa" Dec 05 17:49:32 crc kubenswrapper[4961]: E1205 17:49:32.935747 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 05 17:49:32 crc kubenswrapper[4961]: E1205 17:49:32.936407 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qbtnq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-server-0_openstack(346da897-3e71-4d6f-b17d-fe5f905dd705): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:49:32 crc kubenswrapper[4961]: E1205 17:49:32.937670 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-server-0" podUID="346da897-3e71-4d6f-b17d-fe5f905dd705" Dec 05 17:49:32 crc kubenswrapper[4961]: E1205 17:49:32.946965 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified" Dec 05 17:49:32 crc kubenswrapper[4961]: E1205 17:49:32.947222 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:setup-container,Image:quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified,Command:[sh -c cp /tmp/erlang-cookie-secret/.erlang.cookie /var/lib/rabbitmq/.erlang.cookie && chmod 600 /var/lib/rabbitmq/.erlang.cookie ; cp /tmp/rabbitmq-plugins/enabled_plugins /operator/enabled_plugins ; echo '[default]' > /var/lib/rabbitmq/.rabbitmqadmin.conf && sed -e 's/default_user/username/' -e 's/default_pass/password/' /tmp/default_user.conf >> /var/lib/rabbitmq/.rabbitmqadmin.conf && chmod 600 /var/lib/rabbitmq/.rabbitmqadmin.conf ; sleep 30],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} BinarySI},},Requests:ResourceList{cpu: {{20 -3} {} 20m DecimalSI},memory: {{67108864 0} {} 
BinarySI},},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:plugins-conf,ReadOnly:false,MountPath:/tmp/rabbitmq-plugins/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-erlang-cookie,ReadOnly:false,MountPath:/var/lib/rabbitmq/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:erlang-cookie-secret,ReadOnly:false,MountPath:/tmp/erlang-cookie-secret/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-plugins,ReadOnly:false,MountPath:/operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:persistence,ReadOnly:false,MountPath:/var/lib/rabbitmq/mnesia/,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:rabbitmq-confd,ReadOnly:false,MountPath:/tmp/default_user.conf,SubPath:default_user.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-5wnfm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod rabbitmq-cell1-server-0_openstack(494ff2b1-6bb3-4c8a-be81-02fe6f884caa): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:49:32 crc kubenswrapper[4961]: E1205 17:49:32.948499 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/rabbitmq-cell1-server-0" podUID="494ff2b1-6bb3-4c8a-be81-02fe6f884caa" Dec 05 17:49:33 crc kubenswrapper[4961]: E1205 17:49:33.047413 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-mariadb:current-podified" Dec 05 17:49:33 crc kubenswrapper[4961]: E1205 17:49:33.047565 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:mysql-bootstrap,Image:quay.io/podified-antelope-centos9/openstack-mariadb:current-podified,Command:[bash 
/var/lib/operator-scripts/mysql_bootstrap.sh],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:True,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:mysql-db,ReadOnly:false,MountPath:/var/lib/mysql,SubPath:mysql,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-default,ReadOnly:true,MountPath:/var/lib/config-data/default,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data-generated,ReadOnly:false,MountPath:/var/lib/config-data/generated,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:operator-scripts,ReadOnly:true,MountPath:/var/lib/operator-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kolla-config,ReadOnly:true,MountPath:/var/lib/kolla/config_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ztcp5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod openstack-cell1-galera-0_openstack(786a3535-1c16-4389-9239-49f6d349c3af): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:49:33 crc kubenswrapper[4961]: E1205 17:49:33.048764 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/openstack-cell1-galera-0" podUID="786a3535-1c16-4389-9239-49f6d349c3af" Dec 05 17:49:33 crc kubenswrapper[4961]: E1205 17:49:33.067537 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mysql-bootstrap\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-mariadb:current-podified\\\"\"" pod="openstack/openstack-cell1-galera-0" podUID="786a3535-1c16-4389-9239-49f6d349c3af" Dec 05 17:49:33 crc kubenswrapper[4961]: E1205 17:49:33.067585 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-cell1-server-0" podUID="494ff2b1-6bb3-4c8a-be81-02fe6f884caa" Dec 05 17:49:33 crc kubenswrapper[4961]: E1205 17:49:33.068961 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"setup-container\" with ImagePullBackOff: \"Back-off pulling image 
\\\"quay.io/podified-antelope-centos9/openstack-rabbitmq:current-podified\\\"\"" pod="openstack/rabbitmq-server-0" podUID="346da897-3e71-4d6f-b17d-fe5f905dd705" Dec 05 17:49:33 crc kubenswrapper[4961]: E1205 17:49:33.772852 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 17:49:33 crc kubenswrapper[4961]: E1205 17:49:33.773062 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n68chd6h679hbfh55fhc6h5ffh5d8h94h56ch589hb4hc5h57bh677hcdh655h8dh667h675h654h66ch567h8fh659h5b4h675h566h55bh54h67dh6dq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rbm49,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-666b6646f7-t5s6v_openstack(6daed413-edbd-4f1e-8a9e-2d51d9223af2): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:49:33 crc kubenswrapper[4961]: E1205 17:49:33.775109 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-666b6646f7-t5s6v" podUID="6daed413-edbd-4f1e-8a9e-2d51d9223af2" Dec 05 17:49:33 crc kubenswrapper[4961]: E1205 17:49:33.778615 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 17:49:33 crc kubenswrapper[4961]: E1205 17:49:33.779374 4961 
kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries --test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:ndfhb5h667h568h584h5f9h58dh565h664h587h597h577h64bh5c4h66fh647hbdh68ch5c5h68dh686h5f7h64hd7hc6h55fh57bh98h57fh87h5fh57fq,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8mmjm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-78dd6ddcc-ds5zx_openstack(9416be69-29a1-4785-8f71-78c22f6c33ff): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:49:33 crc kubenswrapper[4961]: E1205 17:49:33.781101 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-78dd6ddcc-ds5zx" podUID="9416be69-29a1-4785-8f71-78c22f6c33ff" Dec 05 17:49:33 crc kubenswrapper[4961]: E1205 17:49:33.788114 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 17:49:33 crc kubenswrapper[4961]: E1205 17:49:33.788286 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nffh5bdhf4h5f8h79h55h77h58fh56dh7bh6fh578hbch55dh68h56bhd9h65dh57ch658hc9h566h666h688h58h65dh684h5d7h6ch575h5d6h88q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-h74jc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-675f4bcbfc-sk59h_openstack(682c1877-0a97-4c80-ae9b-de6bebbb1b3d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:49:33 crc kubenswrapper[4961]: E1205 17:49:33.789495 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-675f4bcbfc-sk59h" podUID="682c1877-0a97-4c80-ae9b-de6bebbb1b3d" Dec 05 17:49:33 crc kubenswrapper[4961]: E1205 17:49:33.854005 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified" Dec 05 17:49:33 crc kubenswrapper[4961]: E1205 17:49:33.854229 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:init,Image:quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified,Command:[/bin/bash],Args:[-c dnsmasq --interface=* --conf-dir=/etc/dnsmasq.d --hostsdir=/etc/dnsmasq.d/hosts --keep-in-foreground --log-debug --bind-interfaces --listen-address=$(POD_IP) --port 5353 --log-facility=- --no-hosts --domain-needed --no-resolv --bogus-priv --log-queries 
--test],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n659h4h664hbh658h587h67ch89h587h8fh679hc6hf9h55fh644h5d5h698h68dh5cdh5ffh669h54ch9h689hb8hd4h5bfhd8h5d7h5fh665h574q,ValueFrom:nil,},EnvVar{Name:POD_IP,Value:,ValueFrom:&EnvVarSource{FieldRef:&ObjectFieldSelector{APIVersion:v1,FieldPath:status.podIP,},ResourceFieldRef:nil,ConfigMapKeyRef:nil,SecretKeyRef:nil,},},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config,ReadOnly:true,MountPath:/etc/dnsmasq.d/config.cfg,SubPath:dns,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dns-svc,ReadOnly:true,MountPath:/etc/dnsmasq.d/hosts/dns-svc,SubPath:dns-svc,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-77ggr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000650000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod dnsmasq-dns-57d769cc4f-qnvkq_openstack(7c79b87d-94a7-47d6-aaa5-3d420ce891d0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:49:33 crc kubenswrapper[4961]: E1205 17:49:33.855410 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/dnsmasq-dns-57d769cc4f-qnvkq" podUID="7c79b87d-94a7-47d6-aaa5-3d420ce891d0" Dec 05 17:49:34 crc kubenswrapper[4961]: E1205 17:49:34.073856 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-666b6646f7-t5s6v" podUID="6daed413-edbd-4f1e-8a9e-2d51d9223af2" Dec 05 17:49:34 crc kubenswrapper[4961]: E1205 17:49:34.073697 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"init\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-neutron-server:current-podified\\\"\"" pod="openstack/dnsmasq-dns-57d769cc4f-qnvkq" podUID="7c79b87d-94a7-47d6-aaa5-3d420ce891d0" Dec 05 17:49:34 crc kubenswrapper[4961]: I1205 17:49:34.463626 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ovs-c9ff2"] Dec 05 17:49:34 crc kubenswrapper[4961]: I1205 17:49:34.672934 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ldph7"] Dec 05 17:49:34 crc kubenswrapper[4961]: I1205 17:49:34.719393 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-sb-0"] Dec 05 17:49:34 crc kubenswrapper[4961]: I1205 
17:49:34.763736 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-sk59h" Dec 05 17:49:34 crc kubenswrapper[4961]: I1205 17:49:34.770594 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-ds5zx" Dec 05 17:49:34 crc kubenswrapper[4961]: W1205 17:49:34.805801 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1603a1ba_53e0_4707_a222_392195709f98.slice/crio-1445323f88a5deae424ac8314eb6d6c2d3779ba576309e3437ba503d91de1b39 WatchSource:0}: Error finding container 1445323f88a5deae424ac8314eb6d6c2d3779ba576309e3437ba503d91de1b39: Status 404 returned error can't find the container with id 1445323f88a5deae424ac8314eb6d6c2d3779ba576309e3437ba503d91de1b39 Dec 05 17:49:34 crc kubenswrapper[4961]: I1205 17:49:34.914687 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/682c1877-0a97-4c80-ae9b-de6bebbb1b3d-config\") pod \"682c1877-0a97-4c80-ae9b-de6bebbb1b3d\" (UID: \"682c1877-0a97-4c80-ae9b-de6bebbb1b3d\") " Dec 05 17:49:34 crc kubenswrapper[4961]: I1205 17:49:34.914751 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9416be69-29a1-4785-8f71-78c22f6c33ff-dns-svc\") pod \"9416be69-29a1-4785-8f71-78c22f6c33ff\" (UID: \"9416be69-29a1-4785-8f71-78c22f6c33ff\") " Dec 05 17:49:34 crc kubenswrapper[4961]: I1205 17:49:34.914844 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h74jc\" (UniqueName: \"kubernetes.io/projected/682c1877-0a97-4c80-ae9b-de6bebbb1b3d-kube-api-access-h74jc\") pod \"682c1877-0a97-4c80-ae9b-de6bebbb1b3d\" (UID: \"682c1877-0a97-4c80-ae9b-de6bebbb1b3d\") " Dec 05 17:49:34 crc kubenswrapper[4961]: I1205 17:49:34.914908 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8mmjm\" (UniqueName: \"kubernetes.io/projected/9416be69-29a1-4785-8f71-78c22f6c33ff-kube-api-access-8mmjm\") pod \"9416be69-29a1-4785-8f71-78c22f6c33ff\" (UID: \"9416be69-29a1-4785-8f71-78c22f6c33ff\") " Dec 05 17:49:34 crc kubenswrapper[4961]: I1205 17:49:34.914957 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9416be69-29a1-4785-8f71-78c22f6c33ff-config\") pod \"9416be69-29a1-4785-8f71-78c22f6c33ff\" (UID: \"9416be69-29a1-4785-8f71-78c22f6c33ff\") " Dec 05 17:49:34 crc kubenswrapper[4961]: I1205 17:49:34.915946 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9416be69-29a1-4785-8f71-78c22f6c33ff-config" (OuterVolumeSpecName: "config") pod "9416be69-29a1-4785-8f71-78c22f6c33ff" (UID: "9416be69-29a1-4785-8f71-78c22f6c33ff"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:49:34 crc kubenswrapper[4961]: I1205 17:49:34.916394 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9416be69-29a1-4785-8f71-78c22f6c33ff-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "9416be69-29a1-4785-8f71-78c22f6c33ff" (UID: "9416be69-29a1-4785-8f71-78c22f6c33ff"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:49:34 crc kubenswrapper[4961]: I1205 17:49:34.917529 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/682c1877-0a97-4c80-ae9b-de6bebbb1b3d-config" (OuterVolumeSpecName: "config") pod "682c1877-0a97-4c80-ae9b-de6bebbb1b3d" (UID: "682c1877-0a97-4c80-ae9b-de6bebbb1b3d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:49:34 crc kubenswrapper[4961]: I1205 17:49:34.924004 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9416be69-29a1-4785-8f71-78c22f6c33ff-kube-api-access-8mmjm" (OuterVolumeSpecName: "kube-api-access-8mmjm") pod "9416be69-29a1-4785-8f71-78c22f6c33ff" (UID: "9416be69-29a1-4785-8f71-78c22f6c33ff"). InnerVolumeSpecName "kube-api-access-8mmjm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:49:34 crc kubenswrapper[4961]: I1205 17:49:34.924058 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/682c1877-0a97-4c80-ae9b-de6bebbb1b3d-kube-api-access-h74jc" (OuterVolumeSpecName: "kube-api-access-h74jc") pod "682c1877-0a97-4c80-ae9b-de6bebbb1b3d" (UID: "682c1877-0a97-4c80-ae9b-de6bebbb1b3d"). InnerVolumeSpecName "kube-api-access-h74jc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:49:35 crc kubenswrapper[4961]: I1205 17:49:35.018061 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h74jc\" (UniqueName: \"kubernetes.io/projected/682c1877-0a97-4c80-ae9b-de6bebbb1b3d-kube-api-access-h74jc\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:35 crc kubenswrapper[4961]: I1205 17:49:35.018473 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8mmjm\" (UniqueName: \"kubernetes.io/projected/9416be69-29a1-4785-8f71-78c22f6c33ff-kube-api-access-8mmjm\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:35 crc kubenswrapper[4961]: I1205 17:49:35.018498 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9416be69-29a1-4785-8f71-78c22f6c33ff-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:35 crc kubenswrapper[4961]: I1205 17:49:35.018516 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/682c1877-0a97-4c80-ae9b-de6bebbb1b3d-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:35 crc kubenswrapper[4961]: I1205 17:49:35.018582 4961 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/9416be69-29a1-4785-8f71-78c22f6c33ff-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:35 crc kubenswrapper[4961]: I1205 17:49:35.090348 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-c9ff2" event={"ID":"db7a99f8-4e0e-408b-9b96-39340c35d4d8","Type":"ContainerStarted","Data":"524831b708ce1607026362194d288c9cf3bc955406099947c2c48411901a2744"} Dec 05 17:49:35 crc kubenswrapper[4961]: I1205 17:49:35.092605 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-675f4bcbfc-sk59h" event={"ID":"682c1877-0a97-4c80-ae9b-de6bebbb1b3d","Type":"ContainerDied","Data":"9967e21f9a22f404dc56fdfdc51a86c5a26a770e52df2b5230c3a2c8111d3752"} Dec 05 17:49:35 crc kubenswrapper[4961]: I1205 17:49:35.092687 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-675f4bcbfc-sk59h" Dec 05 17:49:35 crc kubenswrapper[4961]: I1205 17:49:35.094720 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-78dd6ddcc-ds5zx" event={"ID":"9416be69-29a1-4785-8f71-78c22f6c33ff","Type":"ContainerDied","Data":"0fc4bf7259022b5148e233d27109b7081fd95659f7db57a2c82036bd6e80b688"} Dec 05 17:49:35 crc kubenswrapper[4961]: I1205 17:49:35.094934 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-78dd6ddcc-ds5zx" Dec 05 17:49:35 crc kubenswrapper[4961]: I1205 17:49:35.097986 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"1603a1ba-53e0-4707-a222-392195709f98","Type":"ContainerStarted","Data":"1445323f88a5deae424ac8314eb6d6c2d3779ba576309e3437ba503d91de1b39"} Dec 05 17:49:35 crc kubenswrapper[4961]: I1205 17:49:35.101249 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ldph7" event={"ID":"6b6b1e99-e081-4c93-8fe8-c693eb7a0205","Type":"ContainerStarted","Data":"f455c93949d6bd8282b31a8e44ff60b475aa2b657eab3a664d3ab255b3cf0562"} Dec 05 17:49:35 crc kubenswrapper[4961]: I1205 17:49:35.138991 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-sk59h"] Dec 05 17:49:35 crc kubenswrapper[4961]: I1205 17:49:35.145880 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-675f4bcbfc-sk59h"] Dec 05 17:49:35 crc kubenswrapper[4961]: I1205 17:49:35.168853 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-ds5zx"] Dec 05 17:49:35 crc kubenswrapper[4961]: I1205 17:49:35.175873 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-78dd6ddcc-ds5zx"] Dec 05 17:49:35 crc kubenswrapper[4961]: I1205 17:49:35.421296 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovsdbserver-nb-0"] Dec 05 17:49:36 crc kubenswrapper[4961]: I1205 17:49:36.110429 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5123c9de-dcdc-4189-8bad-330610afddd3","Type":"ContainerStarted","Data":"9b699dbd0951639780506a5a40563260fe0d7d437c24fb4c8baa72945380f33e"} Dec 05 17:49:36 crc kubenswrapper[4961]: I1205 17:49:36.110766 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 05 17:49:36 crc kubenswrapper[4961]: I1205 17:49:36.111414 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d","Type":"ContainerStarted","Data":"7bf68ab4e32caafddf43c61552d4ac9f62ece1653e4ab4f8ae4898e3ca2f8d44"} Dec 05 17:49:36 crc kubenswrapper[4961]: I1205 17:49:36.880172 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="682c1877-0a97-4c80-ae9b-de6bebbb1b3d" path="/var/lib/kubelet/pods/682c1877-0a97-4c80-ae9b-de6bebbb1b3d/volumes" Dec 05 17:49:36 crc kubenswrapper[4961]: I1205 17:49:36.880975 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9416be69-29a1-4785-8f71-78c22f6c33ff" path="/var/lib/kubelet/pods/9416be69-29a1-4785-8f71-78c22f6c33ff/volumes" Dec 05 17:49:38 crc kubenswrapper[4961]: I1205 17:49:38.888971 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=4.448205 podStartE2EDuration="33.888948435s" podCreationTimestamp="2025-12-05 17:49:05 +0000 
UTC" firstStartedPulling="2025-12-05 17:49:06.244465698 +0000 UTC m=+952.305616171" lastFinishedPulling="2025-12-05 17:49:35.685209133 +0000 UTC m=+981.746359606" observedRunningTime="2025-12-05 17:49:36.135013249 +0000 UTC m=+982.196163732" watchObservedRunningTime="2025-12-05 17:49:38.888948435 +0000 UTC m=+984.950098908" Dec 05 17:49:39 crc kubenswrapper[4961]: I1205 17:49:39.136746 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"1603a1ba-53e0-4707-a222-392195709f98","Type":"ContainerStarted","Data":"ea63a6a5c1087fda3416f984ed0d88f8a40b90a954e276502618708fd55d9e0a"} Dec 05 17:49:39 crc kubenswrapper[4961]: I1205 17:49:39.138940 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ldph7" event={"ID":"6b6b1e99-e081-4c93-8fe8-c693eb7a0205","Type":"ContainerStarted","Data":"030a8fddeb27ecf24fc08144e570305163ac4f21d4de20b3bb01df5cd96e02da"} Dec 05 17:49:39 crc kubenswrapper[4961]: I1205 17:49:39.140027 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ldph7" Dec 05 17:49:39 crc kubenswrapper[4961]: I1205 17:49:39.142943 4961 generic.go:334] "Generic (PLEG): container finished" podID="db7a99f8-4e0e-408b-9b96-39340c35d4d8" containerID="3e3f1f226d4ccf957aa986c5e4bdd3bf0f771276ec02e05a1298cfc8ab9724c0" exitCode=0 Dec 05 17:49:39 crc kubenswrapper[4961]: I1205 17:49:39.143039 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-c9ff2" event={"ID":"db7a99f8-4e0e-408b-9b96-39340c35d4d8","Type":"ContainerDied","Data":"3e3f1f226d4ccf957aa986c5e4bdd3bf0f771276ec02e05a1298cfc8ab9724c0"} Dec 05 17:49:39 crc kubenswrapper[4961]: I1205 17:49:39.146909 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d","Type":"ContainerStarted","Data":"468b5680b300b1dd82b6c9df62062213fc45d356e465f5c176d423213079f30f"} Dec 05 17:49:39 crc kubenswrapper[4961]: I1205 17:49:39.158849 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ldph7" podStartSLOduration=26.667378865 podStartE2EDuration="30.158828955s" podCreationTimestamp="2025-12-05 17:49:09 +0000 UTC" firstStartedPulling="2025-12-05 17:49:34.726156774 +0000 UTC m=+980.787307247" lastFinishedPulling="2025-12-05 17:49:38.217606874 +0000 UTC m=+984.278757337" observedRunningTime="2025-12-05 17:49:39.154298115 +0000 UTC m=+985.215448618" watchObservedRunningTime="2025-12-05 17:49:39.158828955 +0000 UTC m=+985.219979428" Dec 05 17:49:40 crc kubenswrapper[4961]: I1205 17:49:40.156996 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"78297e26-2e01-4bb1-8f35-c96861dfda09","Type":"ContainerStarted","Data":"00fd92e48b15740cb9905d0963ac080fb0ca80915d7f444ee1b00ae874e293b4"} Dec 05 17:49:40 crc kubenswrapper[4961]: I1205 17:49:40.159243 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-c9ff2" event={"ID":"db7a99f8-4e0e-408b-9b96-39340c35d4d8","Type":"ContainerStarted","Data":"abeaf11de65a324ab497ab9624e699c69a3c01fa14a5186c8f6a15ee312e1876"} Dec 05 17:49:40 crc kubenswrapper[4961]: I1205 17:49:40.159294 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ovs-c9ff2" event={"ID":"db7a99f8-4e0e-408b-9b96-39340c35d4d8","Type":"ContainerStarted","Data":"0a2a2516d670def219cba94ac60ebcafba424da5313c8e023271139ad96e761f"} Dec 05 17:49:40 crc 
kubenswrapper[4961]: I1205 17:49:40.159495 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-c9ff2" Dec 05 17:49:41 crc kubenswrapper[4961]: I1205 17:49:41.167079 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-controller-ovs-c9ff2" Dec 05 17:49:43 crc kubenswrapper[4961]: I1205 17:49:43.894685 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ovs-c9ff2" podStartSLOduration=31.277180975 podStartE2EDuration="34.894656892s" podCreationTimestamp="2025-12-05 17:49:09 +0000 UTC" firstStartedPulling="2025-12-05 17:49:34.600285171 +0000 UTC m=+980.661435644" lastFinishedPulling="2025-12-05 17:49:38.217761088 +0000 UTC m=+984.278911561" observedRunningTime="2025-12-05 17:49:40.195829453 +0000 UTC m=+986.256979936" watchObservedRunningTime="2025-12-05 17:49:43.894656892 +0000 UTC m=+989.955807385" Dec 05 17:49:44 crc kubenswrapper[4961]: I1205 17:49:44.211750 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-nb-0" event={"ID":"38c3904c-e6f1-4d83-bfbc-d5c39f52d67d","Type":"ContainerStarted","Data":"3f4196da1c228b1620ac3d6e7a026037de73a253893e58d769812780bbf38fd0"} Dec 05 17:49:44 crc kubenswrapper[4961]: I1205 17:49:44.213715 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovsdbserver-sb-0" event={"ID":"1603a1ba-53e0-4707-a222-392195709f98","Type":"ContainerStarted","Data":"74dbd82fd4d2adfa72ffe12de7b8f6be03108d82332802105a6f6d4ca86d883e"} Dec 05 17:49:44 crc kubenswrapper[4961]: I1205 17:49:44.215395 4961 generic.go:334] "Generic (PLEG): container finished" podID="78297e26-2e01-4bb1-8f35-c96861dfda09" containerID="00fd92e48b15740cb9905d0963ac080fb0ca80915d7f444ee1b00ae874e293b4" exitCode=0 Dec 05 17:49:44 crc kubenswrapper[4961]: I1205 17:49:44.215448 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"78297e26-2e01-4bb1-8f35-c96861dfda09","Type":"ContainerDied","Data":"00fd92e48b15740cb9905d0963ac080fb0ca80915d7f444ee1b00ae874e293b4"} Dec 05 17:49:44 crc kubenswrapper[4961]: I1205 17:49:44.248290 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-nb-0" podStartSLOduration=26.917910164 podStartE2EDuration="33.248247141s" podCreationTimestamp="2025-12-05 17:49:11 +0000 UTC" firstStartedPulling="2025-12-05 17:49:35.640866667 +0000 UTC m=+981.702017140" lastFinishedPulling="2025-12-05 17:49:41.971203644 +0000 UTC m=+988.032354117" observedRunningTime="2025-12-05 17:49:44.234257048 +0000 UTC m=+990.295407531" watchObservedRunningTime="2025-12-05 17:49:44.248247141 +0000 UTC m=+990.309397624" Dec 05 17:49:44 crc kubenswrapper[4961]: I1205 17:49:44.266436 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovsdbserver-sb-0" podStartSLOduration=26.09455369 podStartE2EDuration="33.266411746s" podCreationTimestamp="2025-12-05 17:49:11 +0000 UTC" firstStartedPulling="2025-12-05 17:49:34.808402969 +0000 UTC m=+980.869553442" lastFinishedPulling="2025-12-05 17:49:41.980261025 +0000 UTC m=+988.041411498" observedRunningTime="2025-12-05 17:49:44.260382668 +0000 UTC m=+990.321533151" watchObservedRunningTime="2025-12-05 17:49:44.266411746 +0000 UTC m=+990.327562229" Dec 05 17:49:45 crc kubenswrapper[4961]: I1205 17:49:45.224532 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/memcached-0" 
event={"ID":"4c503456-1649-444c-a321-687b4294d2fa","Type":"ContainerStarted","Data":"f386d3981c4844fca100df1cfe1140385f65b7ac3eaaefc82537affbfd7f232c"} Dec 05 17:49:45 crc kubenswrapper[4961]: I1205 17:49:45.225010 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/memcached-0" Dec 05 17:49:45 crc kubenswrapper[4961]: I1205 17:49:45.227362 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-galera-0" event={"ID":"78297e26-2e01-4bb1-8f35-c96861dfda09","Type":"ContainerStarted","Data":"302c77b0b8a341da7de8c4ba1817fe537f44d4d13a8f2b11ceaaf3fdecb49257"} Dec 05 17:49:45 crc kubenswrapper[4961]: I1205 17:49:45.244094 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/memcached-0" podStartSLOduration=2.066833266 podStartE2EDuration="42.244078989s" podCreationTimestamp="2025-12-05 17:49:03 +0000 UTC" firstStartedPulling="2025-12-05 17:49:04.161390512 +0000 UTC m=+950.222540985" lastFinishedPulling="2025-12-05 17:49:44.338636235 +0000 UTC m=+990.399786708" observedRunningTime="2025-12-05 17:49:45.241821335 +0000 UTC m=+991.302971818" watchObservedRunningTime="2025-12-05 17:49:45.244078989 +0000 UTC m=+991.305229462" Dec 05 17:49:45 crc kubenswrapper[4961]: I1205 17:49:45.269170 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-galera-0" podStartSLOduration=8.621546276 podStartE2EDuration="45.269151304s" podCreationTimestamp="2025-12-05 17:49:00 +0000 UTC" firstStartedPulling="2025-12-05 17:49:02.718538635 +0000 UTC m=+948.779689098" lastFinishedPulling="2025-12-05 17:49:39.366143663 +0000 UTC m=+985.427294126" observedRunningTime="2025-12-05 17:49:45.262531572 +0000 UTC m=+991.323682035" watchObservedRunningTime="2025-12-05 17:49:45.269151304 +0000 UTC m=+991.330301777" Dec 05 17:49:45 crc kubenswrapper[4961]: I1205 17:49:45.540685 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 05 17:49:45 crc kubenswrapper[4961]: I1205 17:49:45.719628 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:45 crc kubenswrapper[4961]: I1205 17:49:45.755485 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.234998 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"346da897-3e71-4d6f-b17d-fe5f905dd705","Type":"ContainerStarted","Data":"c802bc16e5515abf77c97c7accaacad9ade519a54f952519cddc8c8437ae18cf"} Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.237217 4961 generic.go:334] "Generic (PLEG): container finished" podID="6daed413-edbd-4f1e-8a9e-2d51d9223af2" containerID="ca11f63d2d2fd5669f07dcacd85e68c7c2f7ce9545bb8d7a6200559573589a59" exitCode=0 Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.237292 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-t5s6v" event={"ID":"6daed413-edbd-4f1e-8a9e-2d51d9223af2","Type":"ContainerDied","Data":"ca11f63d2d2fd5669f07dcacd85e68c7c2f7ce9545bb8d7a6200559573589a59"} Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.237636 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-sb-0" Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.277077 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openstack/ovsdbserver-nb-0"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.279650 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-sb-0"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.340702 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/ovsdbserver-nb-0"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.522113 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-qnvkq"]
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.565874 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-metrics-bkcrz"]
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.567293 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-bkcrz"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.569560 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-metrics-config"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.579119 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-2jpmt"]
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.580677 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-2jpmt"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.584403 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-sb"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.587816 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-bkcrz"]
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.598047 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-2jpmt"]
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.633989 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3854940d-06c0-4afd-a62f-eeeff97e5b7f-config\") pod \"ovn-controller-metrics-bkcrz\" (UID: \"3854940d-06c0-4afd-a62f-eeeff97e5b7f\") " pod="openstack/ovn-controller-metrics-bkcrz"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.634050 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/3854940d-06c0-4afd-a62f-eeeff97e5b7f-ovn-rundir\") pod \"ovn-controller-metrics-bkcrz\" (UID: \"3854940d-06c0-4afd-a62f-eeeff97e5b7f\") " pod="openstack/ovn-controller-metrics-bkcrz"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.634087 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/83e4b124-11e4-425b-aae4-3996e61188af-config\") pod \"dnsmasq-dns-6bc7876d45-2jpmt\" (UID: \"83e4b124-11e4-425b-aae4-3996e61188af\") " pod="openstack/dnsmasq-dns-6bc7876d45-2jpmt"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.634107 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3854940d-06c0-4afd-a62f-eeeff97e5b7f-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-bkcrz\" (UID: \"3854940d-06c0-4afd-a62f-eeeff97e5b7f\") " pod="openstack/ovn-controller-metrics-bkcrz"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.634141 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mv7t2\" (UniqueName: \"kubernetes.io/projected/3854940d-06c0-4afd-a62f-eeeff97e5b7f-kube-api-access-mv7t2\") pod \"ovn-controller-metrics-bkcrz\" (UID: \"3854940d-06c0-4afd-a62f-eeeff97e5b7f\") " pod="openstack/ovn-controller-metrics-bkcrz"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.634181 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3854940d-06c0-4afd-a62f-eeeff97e5b7f-combined-ca-bundle\") pod \"ovn-controller-metrics-bkcrz\" (UID: \"3854940d-06c0-4afd-a62f-eeeff97e5b7f\") " pod="openstack/ovn-controller-metrics-bkcrz"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.634198 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/83e4b124-11e4-425b-aae4-3996e61188af-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-2jpmt\" (UID: \"83e4b124-11e4-425b-aae4-3996e61188af\") " pod="openstack/dnsmasq-dns-6bc7876d45-2jpmt"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.634217 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5gqmp\" (UniqueName: \"kubernetes.io/projected/83e4b124-11e4-425b-aae4-3996e61188af-kube-api-access-5gqmp\") pod \"dnsmasq-dns-6bc7876d45-2jpmt\" (UID: \"83e4b124-11e4-425b-aae4-3996e61188af\") " pod="openstack/dnsmasq-dns-6bc7876d45-2jpmt"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.634244 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/3854940d-06c0-4afd-a62f-eeeff97e5b7f-ovs-rundir\") pod \"ovn-controller-metrics-bkcrz\" (UID: \"3854940d-06c0-4afd-a62f-eeeff97e5b7f\") " pod="openstack/ovn-controller-metrics-bkcrz"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.634275 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/83e4b124-11e4-425b-aae4-3996e61188af-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-2jpmt\" (UID: \"83e4b124-11e4-425b-aae4-3996e61188af\") " pod="openstack/dnsmasq-dns-6bc7876d45-2jpmt"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.735367 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/83e4b124-11e4-425b-aae4-3996e61188af-config\") pod \"dnsmasq-dns-6bc7876d45-2jpmt\" (UID: \"83e4b124-11e4-425b-aae4-3996e61188af\") " pod="openstack/dnsmasq-dns-6bc7876d45-2jpmt"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.735419 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3854940d-06c0-4afd-a62f-eeeff97e5b7f-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-bkcrz\" (UID: \"3854940d-06c0-4afd-a62f-eeeff97e5b7f\") " pod="openstack/ovn-controller-metrics-bkcrz"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.735470 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mv7t2\" (UniqueName: \"kubernetes.io/projected/3854940d-06c0-4afd-a62f-eeeff97e5b7f-kube-api-access-mv7t2\") pod \"ovn-controller-metrics-bkcrz\" (UID: \"3854940d-06c0-4afd-a62f-eeeff97e5b7f\") " pod="openstack/ovn-controller-metrics-bkcrz"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.735506 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3854940d-06c0-4afd-a62f-eeeff97e5b7f-combined-ca-bundle\") pod \"ovn-controller-metrics-bkcrz\" (UID: \"3854940d-06c0-4afd-a62f-eeeff97e5b7f\") " pod="openstack/ovn-controller-metrics-bkcrz"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.736362 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/83e4b124-11e4-425b-aae4-3996e61188af-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-2jpmt\" (UID: \"83e4b124-11e4-425b-aae4-3996e61188af\") " pod="openstack/dnsmasq-dns-6bc7876d45-2jpmt"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.736403 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5gqmp\" (UniqueName: \"kubernetes.io/projected/83e4b124-11e4-425b-aae4-3996e61188af-kube-api-access-5gqmp\") pod \"dnsmasq-dns-6bc7876d45-2jpmt\" (UID: \"83e4b124-11e4-425b-aae4-3996e61188af\") " pod="openstack/dnsmasq-dns-6bc7876d45-2jpmt"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.736444 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/3854940d-06c0-4afd-a62f-eeeff97e5b7f-ovs-rundir\") pod \"ovn-controller-metrics-bkcrz\" (UID: \"3854940d-06c0-4afd-a62f-eeeff97e5b7f\") " pod="openstack/ovn-controller-metrics-bkcrz"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.736525 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/83e4b124-11e4-425b-aae4-3996e61188af-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-2jpmt\" (UID: \"83e4b124-11e4-425b-aae4-3996e61188af\") " pod="openstack/dnsmasq-dns-6bc7876d45-2jpmt"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.736563 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3854940d-06c0-4afd-a62f-eeeff97e5b7f-config\") pod \"ovn-controller-metrics-bkcrz\" (UID: \"3854940d-06c0-4afd-a62f-eeeff97e5b7f\") " pod="openstack/ovn-controller-metrics-bkcrz"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.736593 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/3854940d-06c0-4afd-a62f-eeeff97e5b7f-ovn-rundir\") pod \"ovn-controller-metrics-bkcrz\" (UID: \"3854940d-06c0-4afd-a62f-eeeff97e5b7f\") " pod="openstack/ovn-controller-metrics-bkcrz"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.736721 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/83e4b124-11e4-425b-aae4-3996e61188af-config\") pod \"dnsmasq-dns-6bc7876d45-2jpmt\" (UID: \"83e4b124-11e4-425b-aae4-3996e61188af\") " pod="openstack/dnsmasq-dns-6bc7876d45-2jpmt"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.736863 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovs-rundir\" (UniqueName: \"kubernetes.io/host-path/3854940d-06c0-4afd-a62f-eeeff97e5b7f-ovs-rundir\") pod \"ovn-controller-metrics-bkcrz\" (UID: \"3854940d-06c0-4afd-a62f-eeeff97e5b7f\") " pod="openstack/ovn-controller-metrics-bkcrz"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.736880 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/host-path/3854940d-06c0-4afd-a62f-eeeff97e5b7f-ovn-rundir\") pod \"ovn-controller-metrics-bkcrz\" (UID: \"3854940d-06c0-4afd-a62f-eeeff97e5b7f\") " pod="openstack/ovn-controller-metrics-bkcrz"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.736945 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/83e4b124-11e4-425b-aae4-3996e61188af-dns-svc\") pod \"dnsmasq-dns-6bc7876d45-2jpmt\" (UID: \"83e4b124-11e4-425b-aae4-3996e61188af\") " pod="openstack/dnsmasq-dns-6bc7876d45-2jpmt"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.737518 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3854940d-06c0-4afd-a62f-eeeff97e5b7f-config\") pod \"ovn-controller-metrics-bkcrz\" (UID: \"3854940d-06c0-4afd-a62f-eeeff97e5b7f\") " pod="openstack/ovn-controller-metrics-bkcrz"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.737815 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/83e4b124-11e4-425b-aae4-3996e61188af-ovsdbserver-sb\") pod \"dnsmasq-dns-6bc7876d45-2jpmt\" (UID: \"83e4b124-11e4-425b-aae4-3996e61188af\") " pod="openstack/dnsmasq-dns-6bc7876d45-2jpmt"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.743367 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/3854940d-06c0-4afd-a62f-eeeff97e5b7f-metrics-certs-tls-certs\") pod \"ovn-controller-metrics-bkcrz\" (UID: \"3854940d-06c0-4afd-a62f-eeeff97e5b7f\") " pod="openstack/ovn-controller-metrics-bkcrz"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.752546 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3854940d-06c0-4afd-a62f-eeeff97e5b7f-combined-ca-bundle\") pod \"ovn-controller-metrics-bkcrz\" (UID: \"3854940d-06c0-4afd-a62f-eeeff97e5b7f\") " pod="openstack/ovn-controller-metrics-bkcrz"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.754297 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mv7t2\" (UniqueName: \"kubernetes.io/projected/3854940d-06c0-4afd-a62f-eeeff97e5b7f-kube-api-access-mv7t2\") pod \"ovn-controller-metrics-bkcrz\" (UID: \"3854940d-06c0-4afd-a62f-eeeff97e5b7f\") " pod="openstack/ovn-controller-metrics-bkcrz"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.764417 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5gqmp\" (UniqueName: \"kubernetes.io/projected/83e4b124-11e4-425b-aae4-3996e61188af-kube-api-access-5gqmp\") pod \"dnsmasq-dns-6bc7876d45-2jpmt\" (UID: \"83e4b124-11e4-425b-aae4-3996e61188af\") " pod="openstack/dnsmasq-dns-6bc7876d45-2jpmt"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.878936 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-qnvkq"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.886158 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-metrics-bkcrz"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.912521 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-2jpmt"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.933978 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-t5s6v"]
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.941606 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77ggr\" (UniqueName: \"kubernetes.io/projected/7c79b87d-94a7-47d6-aaa5-3d420ce891d0-kube-api-access-77ggr\") pod \"7c79b87d-94a7-47d6-aaa5-3d420ce891d0\" (UID: \"7c79b87d-94a7-47d6-aaa5-3d420ce891d0\") "
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.941684 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c79b87d-94a7-47d6-aaa5-3d420ce891d0-config\") pod \"7c79b87d-94a7-47d6-aaa5-3d420ce891d0\" (UID: \"7c79b87d-94a7-47d6-aaa5-3d420ce891d0\") "
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.941954 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7c79b87d-94a7-47d6-aaa5-3d420ce891d0-dns-svc\") pod \"7c79b87d-94a7-47d6-aaa5-3d420ce891d0\" (UID: \"7c79b87d-94a7-47d6-aaa5-3d420ce891d0\") "
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.943884 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c79b87d-94a7-47d6-aaa5-3d420ce891d0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "7c79b87d-94a7-47d6-aaa5-3d420ce891d0" (UID: "7c79b87d-94a7-47d6-aaa5-3d420ce891d0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.944840 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8554648995-qggrr"]
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.945000 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7c79b87d-94a7-47d6-aaa5-3d420ce891d0-config" (OuterVolumeSpecName: "config") pod "7c79b87d-94a7-47d6-aaa5-3d420ce891d0" (UID: "7c79b87d-94a7-47d6-aaa5-3d420ce891d0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.946173 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-qggrr"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.947673 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c79b87d-94a7-47d6-aaa5-3d420ce891d0-kube-api-access-77ggr" (OuterVolumeSpecName: "kube-api-access-77ggr") pod "7c79b87d-94a7-47d6-aaa5-3d420ce891d0" (UID: "7c79b87d-94a7-47d6-aaa5-3d420ce891d0"). InnerVolumeSpecName "kube-api-access-77ggr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.954213 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovsdbserver-nb"
Dec 05 17:49:46 crc kubenswrapper[4961]: I1205 17:49:46.985480 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-qggrr"]
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.044686 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4s2fv\" (UniqueName: \"kubernetes.io/projected/50c2c6e1-8c4d-4464-a362-3e99192aa795-kube-api-access-4s2fv\") pod \"dnsmasq-dns-8554648995-qggrr\" (UID: \"50c2c6e1-8c4d-4464-a362-3e99192aa795\") " pod="openstack/dnsmasq-dns-8554648995-qggrr"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.045042 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/50c2c6e1-8c4d-4464-a362-3e99192aa795-dns-svc\") pod \"dnsmasq-dns-8554648995-qggrr\" (UID: \"50c2c6e1-8c4d-4464-a362-3e99192aa795\") " pod="openstack/dnsmasq-dns-8554648995-qggrr"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.046292 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50c2c6e1-8c4d-4464-a362-3e99192aa795-config\") pod \"dnsmasq-dns-8554648995-qggrr\" (UID: \"50c2c6e1-8c4d-4464-a362-3e99192aa795\") " pod="openstack/dnsmasq-dns-8554648995-qggrr"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.046375 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/50c2c6e1-8c4d-4464-a362-3e99192aa795-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-qggrr\" (UID: \"50c2c6e1-8c4d-4464-a362-3e99192aa795\") " pod="openstack/dnsmasq-dns-8554648995-qggrr"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.046519 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/50c2c6e1-8c4d-4464-a362-3e99192aa795-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-qggrr\" (UID: \"50c2c6e1-8c4d-4464-a362-3e99192aa795\") " pod="openstack/dnsmasq-dns-8554648995-qggrr"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.046567 4961 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/7c79b87d-94a7-47d6-aaa5-3d420ce891d0-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.046595 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77ggr\" (UniqueName: \"kubernetes.io/projected/7c79b87d-94a7-47d6-aaa5-3d420ce891d0-kube-api-access-77ggr\") on node \"crc\" DevicePath \"\""
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.046606 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7c79b87d-94a7-47d6-aaa5-3d420ce891d0-config\") on node \"crc\" DevicePath \"\""
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.148119 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/50c2c6e1-8c4d-4464-a362-3e99192aa795-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-qggrr\" (UID: \"50c2c6e1-8c4d-4464-a362-3e99192aa795\") " pod="openstack/dnsmasq-dns-8554648995-qggrr"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.148199 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/50c2c6e1-8c4d-4464-a362-3e99192aa795-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-qggrr\" (UID: \"50c2c6e1-8c4d-4464-a362-3e99192aa795\") " pod="openstack/dnsmasq-dns-8554648995-qggrr"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.148241 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4s2fv\" (UniqueName: \"kubernetes.io/projected/50c2c6e1-8c4d-4464-a362-3e99192aa795-kube-api-access-4s2fv\") pod \"dnsmasq-dns-8554648995-qggrr\" (UID: \"50c2c6e1-8c4d-4464-a362-3e99192aa795\") " pod="openstack/dnsmasq-dns-8554648995-qggrr"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.148260 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/50c2c6e1-8c4d-4464-a362-3e99192aa795-dns-svc\") pod \"dnsmasq-dns-8554648995-qggrr\" (UID: \"50c2c6e1-8c4d-4464-a362-3e99192aa795\") " pod="openstack/dnsmasq-dns-8554648995-qggrr"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.148671 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50c2c6e1-8c4d-4464-a362-3e99192aa795-config\") pod \"dnsmasq-dns-8554648995-qggrr\" (UID: \"50c2c6e1-8c4d-4464-a362-3e99192aa795\") " pod="openstack/dnsmasq-dns-8554648995-qggrr"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.149801 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/50c2c6e1-8c4d-4464-a362-3e99192aa795-ovsdbserver-nb\") pod \"dnsmasq-dns-8554648995-qggrr\" (UID: \"50c2c6e1-8c4d-4464-a362-3e99192aa795\") " pod="openstack/dnsmasq-dns-8554648995-qggrr"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.150649 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/50c2c6e1-8c4d-4464-a362-3e99192aa795-dns-svc\") pod \"dnsmasq-dns-8554648995-qggrr\" (UID: \"50c2c6e1-8c4d-4464-a362-3e99192aa795\") " pod="openstack/dnsmasq-dns-8554648995-qggrr"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.150758 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50c2c6e1-8c4d-4464-a362-3e99192aa795-config\") pod \"dnsmasq-dns-8554648995-qggrr\" (UID: \"50c2c6e1-8c4d-4464-a362-3e99192aa795\") " pod="openstack/dnsmasq-dns-8554648995-qggrr"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.150899 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/50c2c6e1-8c4d-4464-a362-3e99192aa795-ovsdbserver-sb\") pod \"dnsmasq-dns-8554648995-qggrr\" (UID: \"50c2c6e1-8c4d-4464-a362-3e99192aa795\") " pod="openstack/dnsmasq-dns-8554648995-qggrr"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.170155 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4s2fv\" (UniqueName: \"kubernetes.io/projected/50c2c6e1-8c4d-4464-a362-3e99192aa795-kube-api-access-4s2fv\") pod \"dnsmasq-dns-8554648995-qggrr\" (UID: \"50c2c6e1-8c4d-4464-a362-3e99192aa795\") " pod="openstack/dnsmasq-dns-8554648995-qggrr"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.274713 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-t5s6v" event={"ID":"6daed413-edbd-4f1e-8a9e-2d51d9223af2","Type":"ContainerStarted","Data":"22dbc34d3b8f7ea76b3f6c919abd4b3748c996fffda371ea740607f34b84a983"}
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.274906 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-666b6646f7-t5s6v" podUID="6daed413-edbd-4f1e-8a9e-2d51d9223af2" containerName="dnsmasq-dns" containerID="cri-o://22dbc34d3b8f7ea76b3f6c919abd4b3748c996fffda371ea740607f34b84a983" gracePeriod=10
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.275240 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-666b6646f7-t5s6v"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.284666 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-57d769cc4f-qnvkq"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.286845 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-57d769cc4f-qnvkq" event={"ID":"7c79b87d-94a7-47d6-aaa5-3d420ce891d0","Type":"ContainerDied","Data":"940c4c5cf92a3946f00019d74891e324d0bbacfd54f763576227c7b185510743"}
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.286897 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovsdbserver-nb-0"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.305212 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-qggrr"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.308722 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-666b6646f7-t5s6v" podStartSLOduration=3.551420882 podStartE2EDuration="49.308708415s" podCreationTimestamp="2025-12-05 17:48:58 +0000 UTC" firstStartedPulling="2025-12-05 17:48:59.522938552 +0000 UTC m=+945.584089015" lastFinishedPulling="2025-12-05 17:49:45.280226075 +0000 UTC m=+991.341376548" observedRunningTime="2025-12-05 17:49:47.304516522 +0000 UTC m=+993.365667015" watchObservedRunningTime="2025-12-05 17:49:47.308708415 +0000 UTC m=+993.369858888"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.328836 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovsdbserver-nb-0"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.436928 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-metrics-bkcrz"]
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.500049 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-qnvkq"]
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.508668 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-57d769cc4f-qnvkq"]
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.569337 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-2jpmt"]
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.715917 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-northd-0"]
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.717359 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.722397 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ovnnorthd-ovndbs"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.722585 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-config"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.722705 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ovnnorthd-ovnnorthd-dockercfg-4jfvx"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.723366 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovnnorthd-scripts"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.733983 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.790263 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-t5s6v"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.879977 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/419fa856-384a-4fd1-95e7-7810e12b1307-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"419fa856-384a-4fd1-95e7-7810e12b1307\") " pod="openstack/ovn-northd-0"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.880024 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gz5gr\" (UniqueName: \"kubernetes.io/projected/419fa856-384a-4fd1-95e7-7810e12b1307-kube-api-access-gz5gr\") pod \"ovn-northd-0\" (UID: \"419fa856-384a-4fd1-95e7-7810e12b1307\") " pod="openstack/ovn-northd-0"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.880041 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/419fa856-384a-4fd1-95e7-7810e12b1307-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"419fa856-384a-4fd1-95e7-7810e12b1307\") " pod="openstack/ovn-northd-0"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.880079 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/419fa856-384a-4fd1-95e7-7810e12b1307-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"419fa856-384a-4fd1-95e7-7810e12b1307\") " pod="openstack/ovn-northd-0"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.880094 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/419fa856-384a-4fd1-95e7-7810e12b1307-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"419fa856-384a-4fd1-95e7-7810e12b1307\") " pod="openstack/ovn-northd-0"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.880129 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/419fa856-384a-4fd1-95e7-7810e12b1307-scripts\") pod \"ovn-northd-0\" (UID: \"419fa856-384a-4fd1-95e7-7810e12b1307\") " pod="openstack/ovn-northd-0"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.880152 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/419fa856-384a-4fd1-95e7-7810e12b1307-config\") pod \"ovn-northd-0\" (UID: \"419fa856-384a-4fd1-95e7-7810e12b1307\") " pod="openstack/ovn-northd-0"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.981647 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rbm49\" (UniqueName: \"kubernetes.io/projected/6daed413-edbd-4f1e-8a9e-2d51d9223af2-kube-api-access-rbm49\") pod \"6daed413-edbd-4f1e-8a9e-2d51d9223af2\" (UID: \"6daed413-edbd-4f1e-8a9e-2d51d9223af2\") "
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.981748 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6daed413-edbd-4f1e-8a9e-2d51d9223af2-dns-svc\") pod \"6daed413-edbd-4f1e-8a9e-2d51d9223af2\" (UID: \"6daed413-edbd-4f1e-8a9e-2d51d9223af2\") "
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.981820 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6daed413-edbd-4f1e-8a9e-2d51d9223af2-config\") pod \"6daed413-edbd-4f1e-8a9e-2d51d9223af2\" (UID: \"6daed413-edbd-4f1e-8a9e-2d51d9223af2\") "
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.982061 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/419fa856-384a-4fd1-95e7-7810e12b1307-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"419fa856-384a-4fd1-95e7-7810e12b1307\") " pod="openstack/ovn-northd-0"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.982090 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/419fa856-384a-4fd1-95e7-7810e12b1307-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"419fa856-384a-4fd1-95e7-7810e12b1307\") " pod="openstack/ovn-northd-0"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.982163 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/419fa856-384a-4fd1-95e7-7810e12b1307-scripts\") pod \"ovn-northd-0\" (UID: \"419fa856-384a-4fd1-95e7-7810e12b1307\") " pod="openstack/ovn-northd-0"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.982212 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/419fa856-384a-4fd1-95e7-7810e12b1307-config\") pod \"ovn-northd-0\" (UID: \"419fa856-384a-4fd1-95e7-7810e12b1307\") " pod="openstack/ovn-northd-0"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.982346 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/419fa856-384a-4fd1-95e7-7810e12b1307-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"419fa856-384a-4fd1-95e7-7810e12b1307\") " pod="openstack/ovn-northd-0"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.982377 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gz5gr\" (UniqueName: \"kubernetes.io/projected/419fa856-384a-4fd1-95e7-7810e12b1307-kube-api-access-gz5gr\") pod \"ovn-northd-0\" (UID: \"419fa856-384a-4fd1-95e7-7810e12b1307\") " pod="openstack/ovn-northd-0"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.982401 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/419fa856-384a-4fd1-95e7-7810e12b1307-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"419fa856-384a-4fd1-95e7-7810e12b1307\") " pod="openstack/ovn-northd-0"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.983421 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-rundir\" (UniqueName: \"kubernetes.io/empty-dir/419fa856-384a-4fd1-95e7-7810e12b1307-ovn-rundir\") pod \"ovn-northd-0\" (UID: \"419fa856-384a-4fd1-95e7-7810e12b1307\") " pod="openstack/ovn-northd-0"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.984477 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/419fa856-384a-4fd1-95e7-7810e12b1307-config\") pod \"ovn-northd-0\" (UID: \"419fa856-384a-4fd1-95e7-7810e12b1307\") " pod="openstack/ovn-northd-0"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.984480 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/419fa856-384a-4fd1-95e7-7810e12b1307-scripts\") pod \"ovn-northd-0\" (UID: \"419fa856-384a-4fd1-95e7-7810e12b1307\") " pod="openstack/ovn-northd-0"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.986154 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs-tls-certs\" (UniqueName: \"kubernetes.io/secret/419fa856-384a-4fd1-95e7-7810e12b1307-metrics-certs-tls-certs\") pod \"ovn-northd-0\" (UID: \"419fa856-384a-4fd1-95e7-7810e12b1307\") " pod="openstack/ovn-northd-0"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.988368 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-northd-tls-certs\" (UniqueName: \"kubernetes.io/secret/419fa856-384a-4fd1-95e7-7810e12b1307-ovn-northd-tls-certs\") pod \"ovn-northd-0\" (UID: \"419fa856-384a-4fd1-95e7-7810e12b1307\") " pod="openstack/ovn-northd-0"
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.990278 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6daed413-edbd-4f1e-8a9e-2d51d9223af2-kube-api-access-rbm49" (OuterVolumeSpecName: "kube-api-access-rbm49") pod "6daed413-edbd-4f1e-8a9e-2d51d9223af2" (UID: "6daed413-edbd-4f1e-8a9e-2d51d9223af2"). InnerVolumeSpecName "kube-api-access-rbm49". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:49:47 crc kubenswrapper[4961]: I1205 17:49:47.991425 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/419fa856-384a-4fd1-95e7-7810e12b1307-combined-ca-bundle\") pod \"ovn-northd-0\" (UID: \"419fa856-384a-4fd1-95e7-7810e12b1307\") " pod="openstack/ovn-northd-0"
Dec 05 17:49:48 crc kubenswrapper[4961]: W1205 17:49:48.006307 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod50c2c6e1_8c4d_4464_a362_3e99192aa795.slice/crio-8789cc6f4b7a82dc930e7e96550002a9a8cc7341afa26c8385621fd49b58722e WatchSource:0}: Error finding container 8789cc6f4b7a82dc930e7e96550002a9a8cc7341afa26c8385621fd49b58722e: Status 404 returned error can't find the container with id 8789cc6f4b7a82dc930e7e96550002a9a8cc7341afa26c8385621fd49b58722e
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.008186 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8554648995-qggrr"]
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.010918 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gz5gr\" (UniqueName: \"kubernetes.io/projected/419fa856-384a-4fd1-95e7-7810e12b1307-kube-api-access-gz5gr\") pod \"ovn-northd-0\" (UID: \"419fa856-384a-4fd1-95e7-7810e12b1307\") " pod="openstack/ovn-northd-0"
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.021389 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6daed413-edbd-4f1e-8a9e-2d51d9223af2-config" (OuterVolumeSpecName: "config") pod "6daed413-edbd-4f1e-8a9e-2d51d9223af2" (UID: "6daed413-edbd-4f1e-8a9e-2d51d9223af2"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.026381 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6daed413-edbd-4f1e-8a9e-2d51d9223af2-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6daed413-edbd-4f1e-8a9e-2d51d9223af2" (UID: "6daed413-edbd-4f1e-8a9e-2d51d9223af2"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.071788 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-northd-0"
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.083981 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rbm49\" (UniqueName: \"kubernetes.io/projected/6daed413-edbd-4f1e-8a9e-2d51d9223af2-kube-api-access-rbm49\") on node \"crc\" DevicePath \"\""
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.084025 4961 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6daed413-edbd-4f1e-8a9e-2d51d9223af2-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.084037 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6daed413-edbd-4f1e-8a9e-2d51d9223af2-config\") on node \"crc\" DevicePath \"\""
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.301439 4961 generic.go:334] "Generic (PLEG): container finished" podID="6daed413-edbd-4f1e-8a9e-2d51d9223af2" containerID="22dbc34d3b8f7ea76b3f6c919abd4b3748c996fffda371ea740607f34b84a983" exitCode=0
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.301705 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-t5s6v" event={"ID":"6daed413-edbd-4f1e-8a9e-2d51d9223af2","Type":"ContainerDied","Data":"22dbc34d3b8f7ea76b3f6c919abd4b3748c996fffda371ea740607f34b84a983"}
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.301844 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-666b6646f7-t5s6v"
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.302103 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-666b6646f7-t5s6v" event={"ID":"6daed413-edbd-4f1e-8a9e-2d51d9223af2","Type":"ContainerDied","Data":"e3c6c3cb326609e61117a4b79a1dd7b6b702f8b4c2d5dfe1a9e4bb08d17af3e6"}
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.302158 4961 scope.go:117] "RemoveContainer" containerID="22dbc34d3b8f7ea76b3f6c919abd4b3748c996fffda371ea740607f34b84a983"
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.319793 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-bkcrz" event={"ID":"3854940d-06c0-4afd-a62f-eeeff97e5b7f","Type":"ContainerStarted","Data":"4bab8126d7e013b8445e3bdb118926100faa1cec897e32194b6471ba35c2f39a"}
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.319897 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-metrics-bkcrz" event={"ID":"3854940d-06c0-4afd-a62f-eeeff97e5b7f","Type":"ContainerStarted","Data":"c41e1e9f6afff3ff76a253d5ecf4683e166997014c82d2778a1911a1a808c638"}
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.326144 4961 generic.go:334] "Generic (PLEG): container finished" podID="50c2c6e1-8c4d-4464-a362-3e99192aa795" containerID="aa5577c9449f6c2cbc0c0bf3eb9fd27ed9331244ae22e970acbee971b1db4509" exitCode=0
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.326233 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-qggrr" event={"ID":"50c2c6e1-8c4d-4464-a362-3e99192aa795","Type":"ContainerDied","Data":"aa5577c9449f6c2cbc0c0bf3eb9fd27ed9331244ae22e970acbee971b1db4509"}
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.326296 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-qggrr" event={"ID":"50c2c6e1-8c4d-4464-a362-3e99192aa795","Type":"ContainerStarted","Data":"8789cc6f4b7a82dc930e7e96550002a9a8cc7341afa26c8385621fd49b58722e"}
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.332765 4961 scope.go:117] "RemoveContainer" containerID="ca11f63d2d2fd5669f07dcacd85e68c7c2f7ce9545bb8d7a6200559573589a59"
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.333333 4961 generic.go:334] "Generic (PLEG): container finished" podID="83e4b124-11e4-425b-aae4-3996e61188af" containerID="5abf322af3208c13a1ccfc8eb5bb766f4a0ecae89086effded1e44a3055125c9" exitCode=0
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.338490 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-2jpmt" event={"ID":"83e4b124-11e4-425b-aae4-3996e61188af","Type":"ContainerDied","Data":"5abf322af3208c13a1ccfc8eb5bb766f4a0ecae89086effded1e44a3055125c9"}
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.338914 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-2jpmt" event={"ID":"83e4b124-11e4-425b-aae4-3996e61188af","Type":"ContainerStarted","Data":"901137673e05cd0d473abfdf5c799230dea8dd4237cf8a69df670f99f648aa25"}
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.344371 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-metrics-bkcrz" podStartSLOduration=2.344328739 podStartE2EDuration="2.344328739s" podCreationTimestamp="2025-12-05 17:49:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:49:48.339287605 +0000 UTC m=+994.400438108" watchObservedRunningTime="2025-12-05 17:49:48.344328739 +0000 UTC m=+994.405479202"
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.384878 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-t5s6v"]
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.391674 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-666b6646f7-t5s6v"]
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.478496 4961 scope.go:117] "RemoveContainer" containerID="22dbc34d3b8f7ea76b3f6c919abd4b3748c996fffda371ea740607f34b84a983"
Dec 05 17:49:48 crc kubenswrapper[4961]: E1205 17:49:48.479058 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"22dbc34d3b8f7ea76b3f6c919abd4b3748c996fffda371ea740607f34b84a983\": container with ID starting with 22dbc34d3b8f7ea76b3f6c919abd4b3748c996fffda371ea740607f34b84a983 not found: ID does not exist" containerID="22dbc34d3b8f7ea76b3f6c919abd4b3748c996fffda371ea740607f34b84a983"
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.479107 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"22dbc34d3b8f7ea76b3f6c919abd4b3748c996fffda371ea740607f34b84a983"} err="failed to get container status \"22dbc34d3b8f7ea76b3f6c919abd4b3748c996fffda371ea740607f34b84a983\": rpc error: code = NotFound desc = could not find container \"22dbc34d3b8f7ea76b3f6c919abd4b3748c996fffda371ea740607f34b84a983\": container with ID starting with 22dbc34d3b8f7ea76b3f6c919abd4b3748c996fffda371ea740607f34b84a983 not found: ID does not exist"
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.479135 4961 scope.go:117] "RemoveContainer" containerID="ca11f63d2d2fd5669f07dcacd85e68c7c2f7ce9545bb8d7a6200559573589a59"
Dec 05 17:49:48 crc kubenswrapper[4961]: E1205 17:49:48.480076 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca11f63d2d2fd5669f07dcacd85e68c7c2f7ce9545bb8d7a6200559573589a59\": container with ID starting with ca11f63d2d2fd5669f07dcacd85e68c7c2f7ce9545bb8d7a6200559573589a59 not found: ID does not exist" containerID="ca11f63d2d2fd5669f07dcacd85e68c7c2f7ce9545bb8d7a6200559573589a59"
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.480136 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca11f63d2d2fd5669f07dcacd85e68c7c2f7ce9545bb8d7a6200559573589a59"} err="failed to get container status \"ca11f63d2d2fd5669f07dcacd85e68c7c2f7ce9545bb8d7a6200559573589a59\": rpc error: code = NotFound desc = could not find container \"ca11f63d2d2fd5669f07dcacd85e68c7c2f7ce9545bb8d7a6200559573589a59\": container with ID starting with ca11f63d2d2fd5669f07dcacd85e68c7c2f7ce9545bb8d7a6200559573589a59 not found: ID does not exist"
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.577951 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-northd-0"]
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.876126 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6daed413-edbd-4f1e-8a9e-2d51d9223af2" path="/var/lib/kubelet/pods/6daed413-edbd-4f1e-8a9e-2d51d9223af2/volumes"
Dec 05 17:49:48 crc kubenswrapper[4961]: I1205 17:49:48.884688 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c79b87d-94a7-47d6-aaa5-3d420ce891d0" path="/var/lib/kubelet/pods/7c79b87d-94a7-47d6-aaa5-3d420ce891d0/volumes"
Dec 05 17:49:49 crc kubenswrapper[4961]: I1205 17:49:49.346157 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"419fa856-384a-4fd1-95e7-7810e12b1307","Type":"ContainerStarted","Data":"061df7d7f39ddab5635020d85e16d96b3c3c64eb6ae81044a9da696f0a09eb98"}
Dec 05 17:49:49 crc kubenswrapper[4961]: I1205 17:49:49.351242 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"786a3535-1c16-4389-9239-49f6d349c3af","Type":"ContainerStarted","Data":"e6d9dd909e50d7a873522edf925dcaf6b602af8d0ee341873f53935d451e79bc"}
Dec 05 17:49:49 crc kubenswrapper[4961]: I1205 17:49:49.353308 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-qggrr" event={"ID":"50c2c6e1-8c4d-4464-a362-3e99192aa795","Type":"ContainerStarted","Data":"3a6acdcab74ac97579b623373377ba5aa4e939301b5b8d1418433de339074323"}
Dec 05 17:49:49 crc kubenswrapper[4961]: I1205 17:49:49.354386 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8554648995-qggrr"
Dec 05 17:49:49 crc kubenswrapper[4961]: I1205 17:49:49.355968 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"494ff2b1-6bb3-4c8a-be81-02fe6f884caa","Type":"ContainerStarted","Data":"d993e9fbe949298b15d84342b1b58595a1705da6e498b29b5a95e7035465ac43"}
Dec 05 17:49:49 crc kubenswrapper[4961]: I1205 17:49:49.357987 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-2jpmt" event={"ID":"83e4b124-11e4-425b-aae4-3996e61188af","Type":"ContainerStarted","Data":"2d0fa3c69c3780d287657abfb32c09fc58128532def93035d4d9faa2903d83db"}
Dec 05 17:49:49 crc kubenswrapper[4961]: I1205 17:49:49.358413 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bc7876d45-2jpmt"
Dec 05 17:49:49 crc kubenswrapper[4961]: I1205 17:49:49.406954 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bc7876d45-2jpmt" podStartSLOduration=3.406933473 podStartE2EDuration="3.406933473s" podCreationTimestamp="2025-12-05 17:49:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:49:49.401289225 +0000 UTC m=+995.462439718" watchObservedRunningTime="2025-12-05 17:49:49.406933473 +0000 UTC m=+995.468083966"
Dec 05 17:49:49 crc kubenswrapper[4961]: I1205 17:49:49.424387 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8554648995-qggrr" podStartSLOduration=3.42436755 podStartE2EDuration="3.42436755s" podCreationTimestamp="2025-12-05 17:49:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:49:49.422216226 +0000 UTC m=+995.483366709" watchObservedRunningTime="2025-12-05 17:49:49.42436755 +0000 UTC m=+995.485518023"
Dec 05 17:49:50 crc kubenswrapper[4961]: I1205 17:49:50.376262 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"419fa856-384a-4fd1-95e7-7810e12b1307","Type":"ContainerStarted","Data":"1ce2629a0f1b5e914ac3e063929d1e36222c45759c9b7a0458da4dff1344d2c7"}
Dec 05 17:49:50 crc kubenswrapper[4961]: I1205 17:49:50.376582 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-northd-0" event={"ID":"419fa856-384a-4fd1-95e7-7810e12b1307","Type":"ContainerStarted","Data":"419db44ea11d962670c71f36674f3bb865a8231716f4fb3f4ff9c254b8d3f736"}
Dec 05 17:49:50 crc kubenswrapper[4961]: I1205 17:49:50.396133 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-northd-0" podStartSLOduration=2.1446667010000002 podStartE2EDuration="3.396115389s" podCreationTimestamp="2025-12-05 17:49:47 +0000 UTC" firstStartedPulling="2025-12-05 17:49:48.603420424 +0000 UTC m=+994.664570897" lastFinishedPulling="2025-12-05 17:49:49.854869112 +0000 UTC m=+995.916019585" observedRunningTime="2025-12-05 17:49:50.394056659 +0000 UTC m=+996.455207152" watchObservedRunningTime="2025-12-05 17:49:50.396115389 +0000 UTC m=+996.457265862"
Dec 05 17:49:51 crc kubenswrapper[4961]: I1205 17:49:51.384222 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ovn-northd-0"
Dec 05 17:49:51 crc kubenswrapper[4961]: I1205 17:49:51.776836 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-galera-0"
Dec 05 17:49:51 crc kubenswrapper[4961]: I1205 17:49:51.777151 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-galera-0"
Dec 05 17:49:51 crc kubenswrapper[4961]: I1205 17:49:51.863458 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-galera-0"
Dec 05 17:49:52 crc kubenswrapper[4961]: I1205 17:49:52.461226 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-galera-0"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.158664 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cf7d-account-create-update-ql94j"]
Dec 05 17:49:53 crc kubenswrapper[4961]: E1205 17:49:53.159049 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6daed413-edbd-4f1e-8a9e-2d51d9223af2" containerName="dnsmasq-dns"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.159066 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="6daed413-edbd-4f1e-8a9e-2d51d9223af2" containerName="dnsmasq-dns"
Dec 05 17:49:53 crc kubenswrapper[4961]: E1205 17:49:53.159083 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6daed413-edbd-4f1e-8a9e-2d51d9223af2" containerName="init"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.159089 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="6daed413-edbd-4f1e-8a9e-2d51d9223af2" containerName="init"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.159247 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="6daed413-edbd-4f1e-8a9e-2d51d9223af2" containerName="dnsmasq-dns"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.159770 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cf7d-account-create-update-ql94j"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.162204 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-db-secret"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.172343 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cf7d-account-create-update-ql94j"]
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.229033 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-create-9jlnd"]
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.230222 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-9jlnd"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.253237 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-9jlnd"]
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.272859 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6-operator-scripts\") pod \"keystone-cf7d-account-create-update-ql94j\" (UID: \"5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6\") " pod="openstack/keystone-cf7d-account-create-update-ql94j"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.272914 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vqwnn\" (UniqueName: \"kubernetes.io/projected/5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6-kube-api-access-vqwnn\") pod \"keystone-cf7d-account-create-update-ql94j\" (UID: \"5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6\") " pod="openstack/keystone-cf7d-account-create-update-ql94j"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.374849 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6-operator-scripts\") pod \"keystone-cf7d-account-create-update-ql94j\" (UID: \"5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6\") " pod="openstack/keystone-cf7d-account-create-update-ql94j"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.374925 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vqwnn\" (UniqueName: \"kubernetes.io/projected/5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6-kube-api-access-vqwnn\") pod \"keystone-cf7d-account-create-update-ql94j\" (UID: \"5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6\") " pod="openstack/keystone-cf7d-account-create-update-ql94j"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.374973 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70c92280-ca86-4655-bd02-85c60adfe674-operator-scripts\") pod \"keystone-db-create-9jlnd\" (UID: \"70c92280-ca86-4655-bd02-85c60adfe674\") " pod="openstack/keystone-db-create-9jlnd"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.375006 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tjpjp\" (UniqueName: \"kubernetes.io/projected/70c92280-ca86-4655-bd02-85c60adfe674-kube-api-access-tjpjp\") pod \"keystone-db-create-9jlnd\" (UID: \"70c92280-ca86-4655-bd02-85c60adfe674\") " pod="openstack/keystone-db-create-9jlnd"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.375542 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6-operator-scripts\") pod \"keystone-cf7d-account-create-update-ql94j\" (UID: \"5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6\") " pod="openstack/keystone-cf7d-account-create-update-ql94j"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.396981 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vqwnn\" (UniqueName: \"kubernetes.io/projected/5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6-kube-api-access-vqwnn\") pod \"keystone-cf7d-account-create-update-ql94j\" (UID: \"5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6\") " pod="openstack/keystone-cf7d-account-create-update-ql94j"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.404208 4961 generic.go:334] "Generic (PLEG): container finished" podID="786a3535-1c16-4389-9239-49f6d349c3af" containerID="e6d9dd909e50d7a873522edf925dcaf6b602af8d0ee341873f53935d451e79bc" exitCode=0
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.404849 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"786a3535-1c16-4389-9239-49f6d349c3af","Type":"ContainerDied","Data":"e6d9dd909e50d7a873522edf925dcaf6b602af8d0ee341873f53935d451e79bc"}
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.422664 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-create-qn94r"]
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.429730 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-qn94r"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.434885 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-qn94r"]
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.476833 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tjpjp\" (UniqueName: \"kubernetes.io/projected/70c92280-ca86-4655-bd02-85c60adfe674-kube-api-access-tjpjp\") pod \"keystone-db-create-9jlnd\" (UID: \"70c92280-ca86-4655-bd02-85c60adfe674\") " pod="openstack/keystone-db-create-9jlnd"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.477220 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70c92280-ca86-4655-bd02-85c60adfe674-operator-scripts\") pod \"keystone-db-create-9jlnd\" (UID: \"70c92280-ca86-4655-bd02-85c60adfe674\") " pod="openstack/keystone-db-create-9jlnd"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.477730 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cf7d-account-create-update-ql94j"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.478491 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70c92280-ca86-4655-bd02-85c60adfe674-operator-scripts\") pod \"keystone-db-create-9jlnd\" (UID: \"70c92280-ca86-4655-bd02-85c60adfe674\") " pod="openstack/keystone-db-create-9jlnd"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.495756 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tjpjp\" (UniqueName: \"kubernetes.io/projected/70c92280-ca86-4655-bd02-85c60adfe674-kube-api-access-tjpjp\") pod \"keystone-db-create-9jlnd\" (UID: \"70c92280-ca86-4655-bd02-85c60adfe674\") " pod="openstack/keystone-db-create-9jlnd"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.538078 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/memcached-0"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.548748 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-4c80-account-create-update-rkrgv"]
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.549098 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-create-9jlnd"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.551066 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-4c80-account-create-update-rkrgv"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.553197 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-db-secret"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.559531 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-4c80-account-create-update-rkrgv"]
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.578615 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-znxm6\" (UniqueName: \"kubernetes.io/projected/d90d35e5-699e-4f11-8f43-97c60a4364d9-kube-api-access-znxm6\") pod \"placement-db-create-qn94r\" (UID: \"d90d35e5-699e-4f11-8f43-97c60a4364d9\") " pod="openstack/placement-db-create-qn94r"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.578668 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d90d35e5-699e-4f11-8f43-97c60a4364d9-operator-scripts\") pod \"placement-db-create-qn94r\" (UID: \"d90d35e5-699e-4f11-8f43-97c60a4364d9\") " pod="openstack/placement-db-create-qn94r"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.680498 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5mwl\" (UniqueName: \"kubernetes.io/projected/3cbe3354-8dc0-4907-b2b3-38a260064997-kube-api-access-n5mwl\") pod \"placement-4c80-account-create-update-rkrgv\" (UID: \"3cbe3354-8dc0-4907-b2b3-38a260064997\") " pod="openstack/placement-4c80-account-create-update-rkrgv"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.680951 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3cbe3354-8dc0-4907-b2b3-38a260064997-operator-scripts\") pod \"placement-4c80-account-create-update-rkrgv\" (UID: \"3cbe3354-8dc0-4907-b2b3-38a260064997\") " pod="openstack/placement-4c80-account-create-update-rkrgv"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.681274 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-znxm6\" (UniqueName: \"kubernetes.io/projected/d90d35e5-699e-4f11-8f43-97c60a4364d9-kube-api-access-znxm6\") pod \"placement-db-create-qn94r\" (UID: \"d90d35e5-699e-4f11-8f43-97c60a4364d9\") " pod="openstack/placement-db-create-qn94r"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.681915 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d90d35e5-699e-4f11-8f43-97c60a4364d9-operator-scripts\") pod \"placement-db-create-qn94r\" (UID: \"d90d35e5-699e-4f11-8f43-97c60a4364d9\") " pod="openstack/placement-db-create-qn94r"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.682740 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d90d35e5-699e-4f11-8f43-97c60a4364d9-operator-scripts\") pod \"placement-db-create-qn94r\" (UID: \"d90d35e5-699e-4f11-8f43-97c60a4364d9\") " pod="openstack/placement-db-create-qn94r"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.783848 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3cbe3354-8dc0-4907-b2b3-38a260064997-operator-scripts\") pod \"placement-4c80-account-create-update-rkrgv\" (UID: \"3cbe3354-8dc0-4907-b2b3-38a260064997\") " pod="openstack/placement-4c80-account-create-update-rkrgv"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.784024 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5mwl\" (UniqueName: \"kubernetes.io/projected/3cbe3354-8dc0-4907-b2b3-38a260064997-kube-api-access-n5mwl\") pod \"placement-4c80-account-create-update-rkrgv\" (UID: \"3cbe3354-8dc0-4907-b2b3-38a260064997\") " pod="openstack/placement-4c80-account-create-update-rkrgv"
Dec 05 17:49:53 crc kubenswrapper[4961]: I1205 17:49:53.784759 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3cbe3354-8dc0-4907-b2b3-38a260064997-operator-scripts\") pod \"placement-4c80-account-create-update-rkrgv\" (UID: \"3cbe3354-8dc0-4907-b2b3-38a260064997\") " pod="openstack/placement-4c80-account-create-update-rkrgv"
Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.002744 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-znxm6\" (UniqueName: \"kubernetes.io/projected/d90d35e5-699e-4f11-8f43-97c60a4364d9-kube-api-access-znxm6\") pod \"placement-db-create-qn94r\" (UID: \"d90d35e5-699e-4f11-8f43-97c60a4364d9\") " pod="openstack/placement-db-create-qn94r"
Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.005626 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5mwl\" (UniqueName: \"kubernetes.io/projected/3cbe3354-8dc0-4907-b2b3-38a260064997-kube-api-access-n5mwl\") pod \"placement-4c80-account-create-update-rkrgv\" (UID: \"3cbe3354-8dc0-4907-b2b3-38a260064997\") " pod="openstack/placement-4c80-account-create-update-rkrgv"
Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.038343 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-qn94r"
Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.047668 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-4c80-account-create-update-rkrgv"
Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.133170 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-create-c45bb"]
Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.136816 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-c45bb"
Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.176328 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-c45bb"]
Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.204197 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kx5p5\" (UniqueName: \"kubernetes.io/projected/f2b416cd-22e0-4619-9fcf-c8329062a46a-kube-api-access-kx5p5\") pod \"glance-db-create-c45bb\" (UID: \"f2b416cd-22e0-4619-9fcf-c8329062a46a\") " pod="openstack/glance-db-create-c45bb"
Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.204240 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2b416cd-22e0-4619-9fcf-c8329062a46a-operator-scripts\") pod \"glance-db-create-c45bb\" (UID: \"f2b416cd-22e0-4619-9fcf-c8329062a46a\") " pod="openstack/glance-db-create-c45bb"
Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.305908 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kx5p5\" (UniqueName: \"kubernetes.io/projected/f2b416cd-22e0-4619-9fcf-c8329062a46a-kube-api-access-kx5p5\") pod \"glance-db-create-c45bb\" (UID: \"f2b416cd-22e0-4619-9fcf-c8329062a46a\") " pod="openstack/glance-db-create-c45bb"
Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.305974 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2b416cd-22e0-4619-9fcf-c8329062a46a-operator-scripts\") pod \"glance-db-create-c45bb\" (UID: \"f2b416cd-22e0-4619-9fcf-c8329062a46a\") " pod="openstack/glance-db-create-c45bb"
Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.306834 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2b416cd-22e0-4619-9fcf-c8329062a46a-operator-scripts\") pod \"glance-db-create-c45bb\" (UID: \"f2b416cd-22e0-4619-9fcf-c8329062a46a\") " pod="openstack/glance-db-create-c45bb"
Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.316217 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-3f8b-account-create-update-p894d"]
Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.317485 4961 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/glance-3f8b-account-create-update-p894d" Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.320619 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-db-secret" Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.325488 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-3f8b-account-create-update-p894d"] Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.326526 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kx5p5\" (UniqueName: \"kubernetes.io/projected/f2b416cd-22e0-4619-9fcf-c8329062a46a-kube-api-access-kx5p5\") pod \"glance-db-create-c45bb\" (UID: \"f2b416cd-22e0-4619-9fcf-c8329062a46a\") " pod="openstack/glance-db-create-c45bb" Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.407137 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpkrt\" (UniqueName: \"kubernetes.io/projected/3c4069b0-74bf-4e21-a280-ae69cfcbcd9a-kube-api-access-gpkrt\") pod \"glance-3f8b-account-create-update-p894d\" (UID: \"3c4069b0-74bf-4e21-a280-ae69cfcbcd9a\") " pod="openstack/glance-3f8b-account-create-update-p894d" Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.407214 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c4069b0-74bf-4e21-a280-ae69cfcbcd9a-operator-scripts\") pod \"glance-3f8b-account-create-update-p894d\" (UID: \"3c4069b0-74bf-4e21-a280-ae69cfcbcd9a\") " pod="openstack/glance-3f8b-account-create-update-p894d" Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.459784 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-create-c45bb" Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.490124 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-create-9jlnd"] Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.503217 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cf7d-account-create-update-ql94j"] Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.508024 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpkrt\" (UniqueName: \"kubernetes.io/projected/3c4069b0-74bf-4e21-a280-ae69cfcbcd9a-kube-api-access-gpkrt\") pod \"glance-3f8b-account-create-update-p894d\" (UID: \"3c4069b0-74bf-4e21-a280-ae69cfcbcd9a\") " pod="openstack/glance-3f8b-account-create-update-p894d" Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.508103 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c4069b0-74bf-4e21-a280-ae69cfcbcd9a-operator-scripts\") pod \"glance-3f8b-account-create-update-p894d\" (UID: \"3c4069b0-74bf-4e21-a280-ae69cfcbcd9a\") " pod="openstack/glance-3f8b-account-create-update-p894d" Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.508993 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c4069b0-74bf-4e21-a280-ae69cfcbcd9a-operator-scripts\") pod \"glance-3f8b-account-create-update-p894d\" (UID: \"3c4069b0-74bf-4e21-a280-ae69cfcbcd9a\") " pod="openstack/glance-3f8b-account-create-update-p894d" Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.534461 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpkrt\" (UniqueName: \"kubernetes.io/projected/3c4069b0-74bf-4e21-a280-ae69cfcbcd9a-kube-api-access-gpkrt\") pod \"glance-3f8b-account-create-update-p894d\" (UID: \"3c4069b0-74bf-4e21-a280-ae69cfcbcd9a\") " pod="openstack/glance-3f8b-account-create-update-p894d" Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.644308 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-3f8b-account-create-update-p894d" Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.649563 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-4c80-account-create-update-rkrgv"] Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.656929 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-create-qn94r"] Dec 05 17:49:54 crc kubenswrapper[4961]: I1205 17:49:54.912647 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-create-c45bb"] Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.171492 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-3f8b-account-create-update-p894d"] Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.423898 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-3f8b-account-create-update-p894d" event={"ID":"3c4069b0-74bf-4e21-a280-ae69cfcbcd9a","Type":"ContainerStarted","Data":"5a1220d59fa4921479d1f2277fd481baf4c20ca9dae92f862850afd41f75eca4"} Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.425578 4961 generic.go:334] "Generic (PLEG): container finished" podID="5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6" containerID="4ef62857d02f7fd3e385d7897cd58deda78db0018020b80ad34bf47aeb7d058b" exitCode=0 Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.425675 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cf7d-account-create-update-ql94j" event={"ID":"5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6","Type":"ContainerDied","Data":"4ef62857d02f7fd3e385d7897cd58deda78db0018020b80ad34bf47aeb7d058b"} Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.425701 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cf7d-account-create-update-ql94j" event={"ID":"5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6","Type":"ContainerStarted","Data":"6892f5132e29d8848e68b44c461416658e2b35f1b011d5cf0518a15169973f43"} Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.428536 4961 generic.go:334] "Generic (PLEG): container finished" podID="3cbe3354-8dc0-4907-b2b3-38a260064997" containerID="a6963f6e620d22e7e65c9339a5f343ae74d71bf2bf7e51b4b63fe8fdd72564f8" exitCode=0 Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.428761 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-4c80-account-create-update-rkrgv" event={"ID":"3cbe3354-8dc0-4907-b2b3-38a260064997","Type":"ContainerDied","Data":"a6963f6e620d22e7e65c9339a5f343ae74d71bf2bf7e51b4b63fe8fdd72564f8"} Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.428825 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-4c80-account-create-update-rkrgv" event={"ID":"3cbe3354-8dc0-4907-b2b3-38a260064997","Type":"ContainerStarted","Data":"09b933c39e53b6f5fd87abfc5ad86cebd2b8ce3970ea8e2027675de6eca752dc"} Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.436031 4961 generic.go:334] "Generic (PLEG): container finished" podID="f2b416cd-22e0-4619-9fcf-c8329062a46a" containerID="3e505e6137404d05c1de363640e5671731743a4a0a1f9dd7b6cacb74b9665149" exitCode=0 Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.436080 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-c45bb" event={"ID":"f2b416cd-22e0-4619-9fcf-c8329062a46a","Type":"ContainerDied","Data":"3e505e6137404d05c1de363640e5671731743a4a0a1f9dd7b6cacb74b9665149"} Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.436127 4961 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-c45bb" event={"ID":"f2b416cd-22e0-4619-9fcf-c8329062a46a","Type":"ContainerStarted","Data":"5933b7ccf6890800d198a3f0a84bab81d4f91b34cb952b997ed7e2f936ac8422"} Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.443195 4961 generic.go:334] "Generic (PLEG): container finished" podID="70c92280-ca86-4655-bd02-85c60adfe674" containerID="a6370ab9954798b978c7ad927208085500e8292d87a042681373823797fab0a0" exitCode=0 Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.443315 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-9jlnd" event={"ID":"70c92280-ca86-4655-bd02-85c60adfe674","Type":"ContainerDied","Data":"a6370ab9954798b978c7ad927208085500e8292d87a042681373823797fab0a0"} Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.443342 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-9jlnd" event={"ID":"70c92280-ca86-4655-bd02-85c60adfe674","Type":"ContainerStarted","Data":"fd763ff12cf3bd2b7820f5f73ab103f5485c1935a0b83b39bc8d8ac49a8ccec0"} Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.448061 4961 generic.go:334] "Generic (PLEG): container finished" podID="d90d35e5-699e-4f11-8f43-97c60a4364d9" containerID="79f9b3d54473091cb81b6cec9b88dd26027f21f7e9a2be654aaaf8c8c001e420" exitCode=0 Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.448201 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-qn94r" event={"ID":"d90d35e5-699e-4f11-8f43-97c60a4364d9","Type":"ContainerDied","Data":"79f9b3d54473091cb81b6cec9b88dd26027f21f7e9a2be654aaaf8c8c001e420"} Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.448239 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-qn94r" event={"ID":"d90d35e5-699e-4f11-8f43-97c60a4364d9","Type":"ContainerStarted","Data":"6dec20895fe14905784c6254552bca198e131dabd6173a315ca22288d3bd8e2e"} Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.451995 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstack-cell1-galera-0" event={"ID":"786a3535-1c16-4389-9239-49f6d349c3af","Type":"ContainerStarted","Data":"5bf5b6d005157e1262dc832f5b60cbb70fdeac8a57797c243f712920e8c5fe7c"} Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.604350 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstack-cell1-galera-0" podStartSLOduration=-9223371983.250443 podStartE2EDuration="53.604332934s" podCreationTimestamp="2025-12-05 17:49:02 +0000 UTC" firstStartedPulling="2025-12-05 17:49:04.024044558 +0000 UTC m=+950.085195031" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:49:55.595147949 +0000 UTC m=+1001.656298422" watchObservedRunningTime="2025-12-05 17:49:55.604332934 +0000 UTC m=+1001.665483407" Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.654566 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-2jpmt"] Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.654794 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6bc7876d45-2jpmt" podUID="83e4b124-11e4-425b-aae4-3996e61188af" containerName="dnsmasq-dns" containerID="cri-o://2d0fa3c69c3780d287657abfb32c09fc58128532def93035d4d9faa2903d83db" gracePeriod=10 Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.658077 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack/dnsmasq-dns-6bc7876d45-2jpmt" Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.752695 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-t65n2"] Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.757404 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.835191 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hw5hq\" (UniqueName: \"kubernetes.io/projected/27a3156a-6997-42ea-888b-ff4a4c2b1988-kube-api-access-hw5hq\") pod \"dnsmasq-dns-b8fbc5445-t65n2\" (UID: \"27a3156a-6997-42ea-888b-ff4a4c2b1988\") " pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.869945 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/27a3156a-6997-42ea-888b-ff4a4c2b1988-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-t65n2\" (UID: \"27a3156a-6997-42ea-888b-ff4a4c2b1988\") " pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.870515 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/27a3156a-6997-42ea-888b-ff4a4c2b1988-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-t65n2\" (UID: \"27a3156a-6997-42ea-888b-ff4a4c2b1988\") " pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.870623 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/27a3156a-6997-42ea-888b-ff4a4c2b1988-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-t65n2\" (UID: \"27a3156a-6997-42ea-888b-ff4a4c2b1988\") " pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.870735 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27a3156a-6997-42ea-888b-ff4a4c2b1988-config\") pod \"dnsmasq-dns-b8fbc5445-t65n2\" (UID: \"27a3156a-6997-42ea-888b-ff4a4c2b1988\") " pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.873398 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-t65n2"] Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.972162 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hw5hq\" (UniqueName: \"kubernetes.io/projected/27a3156a-6997-42ea-888b-ff4a4c2b1988-kube-api-access-hw5hq\") pod \"dnsmasq-dns-b8fbc5445-t65n2\" (UID: \"27a3156a-6997-42ea-888b-ff4a4c2b1988\") " pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.972248 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/27a3156a-6997-42ea-888b-ff4a4c2b1988-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-t65n2\" (UID: \"27a3156a-6997-42ea-888b-ff4a4c2b1988\") " pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.972319 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: 
\"kubernetes.io/configmap/27a3156a-6997-42ea-888b-ff4a4c2b1988-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-t65n2\" (UID: \"27a3156a-6997-42ea-888b-ff4a4c2b1988\") " pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.972358 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/27a3156a-6997-42ea-888b-ff4a4c2b1988-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-t65n2\" (UID: \"27a3156a-6997-42ea-888b-ff4a4c2b1988\") " pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.972373 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27a3156a-6997-42ea-888b-ff4a4c2b1988-config\") pod \"dnsmasq-dns-b8fbc5445-t65n2\" (UID: \"27a3156a-6997-42ea-888b-ff4a4c2b1988\") " pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.973225 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27a3156a-6997-42ea-888b-ff4a4c2b1988-config\") pod \"dnsmasq-dns-b8fbc5445-t65n2\" (UID: \"27a3156a-6997-42ea-888b-ff4a4c2b1988\") " pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.974134 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/27a3156a-6997-42ea-888b-ff4a4c2b1988-ovsdbserver-sb\") pod \"dnsmasq-dns-b8fbc5445-t65n2\" (UID: \"27a3156a-6997-42ea-888b-ff4a4c2b1988\") " pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.974656 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/27a3156a-6997-42ea-888b-ff4a4c2b1988-dns-svc\") pod \"dnsmasq-dns-b8fbc5445-t65n2\" (UID: \"27a3156a-6997-42ea-888b-ff4a4c2b1988\") " pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" Dec 05 17:49:55 crc kubenswrapper[4961]: I1205 17:49:55.975282 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/27a3156a-6997-42ea-888b-ff4a4c2b1988-ovsdbserver-nb\") pod \"dnsmasq-dns-b8fbc5445-t65n2\" (UID: \"27a3156a-6997-42ea-888b-ff4a4c2b1988\") " pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" Dec 05 17:49:56 crc kubenswrapper[4961]: I1205 17:49:56.002860 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hw5hq\" (UniqueName: \"kubernetes.io/projected/27a3156a-6997-42ea-888b-ff4a4c2b1988-kube-api-access-hw5hq\") pod \"dnsmasq-dns-b8fbc5445-t65n2\" (UID: \"27a3156a-6997-42ea-888b-ff4a4c2b1988\") " pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" Dec 05 17:49:56 crc kubenswrapper[4961]: I1205 17:49:56.240985 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" Dec 05 17:49:56 crc kubenswrapper[4961]: I1205 17:49:56.467823 4961 generic.go:334] "Generic (PLEG): container finished" podID="3c4069b0-74bf-4e21-a280-ae69cfcbcd9a" containerID="6083168115dd228e12865685f426a7904025197e64de7785cb9a6bc158c8d2cc" exitCode=0 Dec 05 17:49:56 crc kubenswrapper[4961]: I1205 17:49:56.467878 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-3f8b-account-create-update-p894d" event={"ID":"3c4069b0-74bf-4e21-a280-ae69cfcbcd9a","Type":"ContainerDied","Data":"6083168115dd228e12865685f426a7904025197e64de7785cb9a6bc158c8d2cc"} Dec 05 17:49:56 crc kubenswrapper[4961]: I1205 17:49:56.470221 4961 generic.go:334] "Generic (PLEG): container finished" podID="83e4b124-11e4-425b-aae4-3996e61188af" containerID="2d0fa3c69c3780d287657abfb32c09fc58128532def93035d4d9faa2903d83db" exitCode=0 Dec 05 17:49:56 crc kubenswrapper[4961]: I1205 17:49:56.470302 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-2jpmt" event={"ID":"83e4b124-11e4-425b-aae4-3996e61188af","Type":"ContainerDied","Data":"2d0fa3c69c3780d287657abfb32c09fc58128532def93035d4d9faa2903d83db"} Dec 05 17:49:56 crc kubenswrapper[4961]: I1205 17:49:56.674571 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-t65n2"] Dec 05 17:49:56 crc kubenswrapper[4961]: I1205 17:49:56.897526 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-storage-0"] Dec 05 17:49:56 crc kubenswrapper[4961]: I1205 17:49:56.908981 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 05 17:49:56 crc kubenswrapper[4961]: I1205 17:49:56.909238 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-storage-0" Dec 05 17:49:56 crc kubenswrapper[4961]: I1205 17:49:56.912063 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-conf" Dec 05 17:49:56 crc kubenswrapper[4961]: I1205 17:49:56.912196 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-files" Dec 05 17:49:56 crc kubenswrapper[4961]: I1205 17:49:56.912065 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-storage-config-data" Dec 05 17:49:56 crc kubenswrapper[4961]: I1205 17:49:56.913167 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-swift-dockercfg-ln8rz" Dec 05 17:49:56 crc kubenswrapper[4961]: I1205 17:49:56.967763 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-2jpmt" Dec 05 17:49:56 crc kubenswrapper[4961]: I1205 17:49:56.979579 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-9jlnd" Dec 05 17:49:56 crc kubenswrapper[4961]: I1205 17:49:56.999091 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-etc-swift\") pod \"swift-storage-0\" (UID: \"e533098a-ca28-487e-8471-7a426defda37\") " pod="openstack/swift-storage-0" Dec 05 17:49:56 crc kubenswrapper[4961]: I1205 17:49:56.999133 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e533098a-ca28-487e-8471-7a426defda37-cache\") pod \"swift-storage-0\" (UID: \"e533098a-ca28-487e-8471-7a426defda37\") " pod="openstack/swift-storage-0" Dec 05 17:49:56 crc kubenswrapper[4961]: I1205 17:49:56.999171 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"e533098a-ca28-487e-8471-7a426defda37\") " pod="openstack/swift-storage-0" Dec 05 17:49:56 crc kubenswrapper[4961]: I1205 17:49:56.999187 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/e533098a-ca28-487e-8471-7a426defda37-lock\") pod \"swift-storage-0\" (UID: \"e533098a-ca28-487e-8471-7a426defda37\") " pod="openstack/swift-storage-0" Dec 05 17:49:56 crc kubenswrapper[4961]: I1205 17:49:56.999278 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hrvb\" (UniqueName: \"kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-kube-api-access-4hrvb\") pod \"swift-storage-0\" (UID: \"e533098a-ca28-487e-8471-7a426defda37\") " pod="openstack/swift-storage-0" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.101042 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/83e4b124-11e4-425b-aae4-3996e61188af-ovsdbserver-sb\") pod \"83e4b124-11e4-425b-aae4-3996e61188af\" (UID: \"83e4b124-11e4-425b-aae4-3996e61188af\") " Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.101214 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5gqmp\" (UniqueName: \"kubernetes.io/projected/83e4b124-11e4-425b-aae4-3996e61188af-kube-api-access-5gqmp\") pod \"83e4b124-11e4-425b-aae4-3996e61188af\" (UID: \"83e4b124-11e4-425b-aae4-3996e61188af\") " Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.101280 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tjpjp\" (UniqueName: \"kubernetes.io/projected/70c92280-ca86-4655-bd02-85c60adfe674-kube-api-access-tjpjp\") pod \"70c92280-ca86-4655-bd02-85c60adfe674\" (UID: \"70c92280-ca86-4655-bd02-85c60adfe674\") " Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.101369 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70c92280-ca86-4655-bd02-85c60adfe674-operator-scripts\") pod \"70c92280-ca86-4655-bd02-85c60adfe674\" (UID: \"70c92280-ca86-4655-bd02-85c60adfe674\") " Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.101464 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/83e4b124-11e4-425b-aae4-3996e61188af-dns-svc\") pod \"83e4b124-11e4-425b-aae4-3996e61188af\" (UID: \"83e4b124-11e4-425b-aae4-3996e61188af\") " Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.101530 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/83e4b124-11e4-425b-aae4-3996e61188af-config\") pod \"83e4b124-11e4-425b-aae4-3996e61188af\" (UID: \"83e4b124-11e4-425b-aae4-3996e61188af\") " Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.101989 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hrvb\" (UniqueName: \"kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-kube-api-access-4hrvb\") pod \"swift-storage-0\" (UID: \"e533098a-ca28-487e-8471-7a426defda37\") " pod="openstack/swift-storage-0" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.102129 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-etc-swift\") pod \"swift-storage-0\" (UID: \"e533098a-ca28-487e-8471-7a426defda37\") " pod="openstack/swift-storage-0" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.102383 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e533098a-ca28-487e-8471-7a426defda37-cache\") pod \"swift-storage-0\" (UID: \"e533098a-ca28-487e-8471-7a426defda37\") " pod="openstack/swift-storage-0" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.102453 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/e533098a-ca28-487e-8471-7a426defda37-lock\") pod \"swift-storage-0\" (UID: \"e533098a-ca28-487e-8471-7a426defda37\") " pod="openstack/swift-storage-0" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.102481 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"e533098a-ca28-487e-8471-7a426defda37\") " pod="openstack/swift-storage-0" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.102587 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/70c92280-ca86-4655-bd02-85c60adfe674-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "70c92280-ca86-4655-bd02-85c60adfe674" (UID: "70c92280-ca86-4655-bd02-85c60adfe674"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:49:57 crc kubenswrapper[4961]: E1205 17:49:57.102741 4961 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 17:49:57 crc kubenswrapper[4961]: E1205 17:49:57.102756 4961 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 17:49:57 crc kubenswrapper[4961]: E1205 17:49:57.102821 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-etc-swift podName:e533098a-ca28-487e-8471-7a426defda37 nodeName:}" failed. No retries permitted until 2025-12-05 17:49:57.602801183 +0000 UTC m=+1003.663951646 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-etc-swift") pod "swift-storage-0" (UID: "e533098a-ca28-487e-8471-7a426defda37") : configmap "swift-ring-files" not found Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.103386 4961 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"e533098a-ca28-487e-8471-7a426defda37\") device mount path \"/mnt/openstack/pv09\"" pod="openstack/swift-storage-0" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.103744 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cache\" (UniqueName: \"kubernetes.io/empty-dir/e533098a-ca28-487e-8471-7a426defda37-cache\") pod \"swift-storage-0\" (UID: \"e533098a-ca28-487e-8471-7a426defda37\") " pod="openstack/swift-storage-0" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.103982 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"lock\" (UniqueName: \"kubernetes.io/empty-dir/e533098a-ca28-487e-8471-7a426defda37-lock\") pod \"swift-storage-0\" (UID: \"e533098a-ca28-487e-8471-7a426defda37\") " pod="openstack/swift-storage-0" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.108059 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/70c92280-ca86-4655-bd02-85c60adfe674-kube-api-access-tjpjp" (OuterVolumeSpecName: "kube-api-access-tjpjp") pod "70c92280-ca86-4655-bd02-85c60adfe674" (UID: "70c92280-ca86-4655-bd02-85c60adfe674"). InnerVolumeSpecName "kube-api-access-tjpjp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.111122 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83e4b124-11e4-425b-aae4-3996e61188af-kube-api-access-5gqmp" (OuterVolumeSpecName: "kube-api-access-5gqmp") pod "83e4b124-11e4-425b-aae4-3996e61188af" (UID: "83e4b124-11e4-425b-aae4-3996e61188af"). InnerVolumeSpecName "kube-api-access-5gqmp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.131659 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hrvb\" (UniqueName: \"kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-kube-api-access-4hrvb\") pod \"swift-storage-0\" (UID: \"e533098a-ca28-487e-8471-7a426defda37\") " pod="openstack/swift-storage-0" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.137926 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-c45bb" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.148636 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage09-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage09-crc\") pod \"swift-storage-0\" (UID: \"e533098a-ca28-487e-8471-7a426defda37\") " pod="openstack/swift-storage-0" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.183327 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/83e4b124-11e4-425b-aae4-3996e61188af-config" (OuterVolumeSpecName: "config") pod "83e4b124-11e4-425b-aae4-3996e61188af" (UID: "83e4b124-11e4-425b-aae4-3996e61188af"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.187400 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/83e4b124-11e4-425b-aae4-3996e61188af-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "83e4b124-11e4-425b-aae4-3996e61188af" (UID: "83e4b124-11e4-425b-aae4-3996e61188af"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.201622 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cf7d-account-create-update-ql94j" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.204812 4961 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/70c92280-ca86-4655-bd02-85c60adfe674-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.204852 4961 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/83e4b124-11e4-425b-aae4-3996e61188af-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.204879 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/83e4b124-11e4-425b-aae4-3996e61188af-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.204893 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5gqmp\" (UniqueName: \"kubernetes.io/projected/83e4b124-11e4-425b-aae4-3996e61188af-kube-api-access-5gqmp\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.204906 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tjpjp\" (UniqueName: \"kubernetes.io/projected/70c92280-ca86-4655-bd02-85c60adfe674-kube-api-access-tjpjp\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.205118 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-qn94r" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.215032 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/83e4b124-11e4-425b-aae4-3996e61188af-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "83e4b124-11e4-425b-aae4-3996e61188af" (UID: "83e4b124-11e4-425b-aae4-3996e61188af"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.219035 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-4c80-account-create-update-rkrgv" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.247130 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.247358 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.306802 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8554648995-qggrr" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.307958 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6-operator-scripts\") pod \"5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6\" (UID: \"5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6\") " Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.308087 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kx5p5\" (UniqueName: \"kubernetes.io/projected/f2b416cd-22e0-4619-9fcf-c8329062a46a-kube-api-access-kx5p5\") pod \"f2b416cd-22e0-4619-9fcf-c8329062a46a\" (UID: \"f2b416cd-22e0-4619-9fcf-c8329062a46a\") " Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.308177 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2b416cd-22e0-4619-9fcf-c8329062a46a-operator-scripts\") pod \"f2b416cd-22e0-4619-9fcf-c8329062a46a\" (UID: \"f2b416cd-22e0-4619-9fcf-c8329062a46a\") " Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.308288 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d90d35e5-699e-4f11-8f43-97c60a4364d9-operator-scripts\") pod \"d90d35e5-699e-4f11-8f43-97c60a4364d9\" (UID: \"d90d35e5-699e-4f11-8f43-97c60a4364d9\") " Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.308406 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5mwl\" (UniqueName: \"kubernetes.io/projected/3cbe3354-8dc0-4907-b2b3-38a260064997-kube-api-access-n5mwl\") pod \"3cbe3354-8dc0-4907-b2b3-38a260064997\" (UID: \"3cbe3354-8dc0-4907-b2b3-38a260064997\") " Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.308590 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3cbe3354-8dc0-4907-b2b3-38a260064997-operator-scripts\") pod \"3cbe3354-8dc0-4907-b2b3-38a260064997\" (UID: \"3cbe3354-8dc0-4907-b2b3-38a260064997\") " Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.308703 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vqwnn\" (UniqueName: \"kubernetes.io/projected/5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6-kube-api-access-vqwnn\") pod \"5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6\" (UID: \"5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6\") " Dec 
05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.308589 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6" (UID: "5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.309089 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f2b416cd-22e0-4619-9fcf-c8329062a46a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f2b416cd-22e0-4619-9fcf-c8329062a46a" (UID: "f2b416cd-22e0-4619-9fcf-c8329062a46a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.309569 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-znxm6\" (UniqueName: \"kubernetes.io/projected/d90d35e5-699e-4f11-8f43-97c60a4364d9-kube-api-access-znxm6\") pod \"d90d35e5-699e-4f11-8f43-97c60a4364d9\" (UID: \"d90d35e5-699e-4f11-8f43-97c60a4364d9\") " Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.310131 4961 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/83e4b124-11e4-425b-aae4-3996e61188af-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.310231 4961 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.310315 4961 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f2b416cd-22e0-4619-9fcf-c8329062a46a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.309601 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cbe3354-8dc0-4907-b2b3-38a260064997-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3cbe3354-8dc0-4907-b2b3-38a260064997" (UID: "3cbe3354-8dc0-4907-b2b3-38a260064997"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.309660 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d90d35e5-699e-4f11-8f43-97c60a4364d9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "d90d35e5-699e-4f11-8f43-97c60a4364d9" (UID: "d90d35e5-699e-4f11-8f43-97c60a4364d9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.317511 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6-kube-api-access-vqwnn" (OuterVolumeSpecName: "kube-api-access-vqwnn") pod "5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6" (UID: "5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6"). InnerVolumeSpecName "kube-api-access-vqwnn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.319344 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2b416cd-22e0-4619-9fcf-c8329062a46a-kube-api-access-kx5p5" (OuterVolumeSpecName: "kube-api-access-kx5p5") pod "f2b416cd-22e0-4619-9fcf-c8329062a46a" (UID: "f2b416cd-22e0-4619-9fcf-c8329062a46a"). InnerVolumeSpecName "kube-api-access-kx5p5". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.319647 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cbe3354-8dc0-4907-b2b3-38a260064997-kube-api-access-n5mwl" (OuterVolumeSpecName: "kube-api-access-n5mwl") pod "3cbe3354-8dc0-4907-b2b3-38a260064997" (UID: "3cbe3354-8dc0-4907-b2b3-38a260064997"). InnerVolumeSpecName "kube-api-access-n5mwl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.322366 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d90d35e5-699e-4f11-8f43-97c60a4364d9-kube-api-access-znxm6" (OuterVolumeSpecName: "kube-api-access-znxm6") pod "d90d35e5-699e-4f11-8f43-97c60a4364d9" (UID: "d90d35e5-699e-4f11-8f43-97c60a4364d9"). InnerVolumeSpecName "kube-api-access-znxm6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.411862 4961 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3cbe3354-8dc0-4907-b2b3-38a260064997-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.412111 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vqwnn\" (UniqueName: \"kubernetes.io/projected/5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6-kube-api-access-vqwnn\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.412436 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-znxm6\" (UniqueName: \"kubernetes.io/projected/d90d35e5-699e-4f11-8f43-97c60a4364d9-kube-api-access-znxm6\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.412507 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kx5p5\" (UniqueName: \"kubernetes.io/projected/f2b416cd-22e0-4619-9fcf-c8329062a46a-kube-api-access-kx5p5\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.412563 4961 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/d90d35e5-699e-4f11-8f43-97c60a4364d9-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.412682 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5mwl\" (UniqueName: \"kubernetes.io/projected/3cbe3354-8dc0-4907-b2b3-38a260064997-kube-api-access-n5mwl\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.435151 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-ring-rebalance-2mrxc"] Dec 05 17:49:57 crc kubenswrapper[4961]: E1205 17:49:57.435443 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="70c92280-ca86-4655-bd02-85c60adfe674" containerName="mariadb-database-create" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.435455 
4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="70c92280-ca86-4655-bd02-85c60adfe674" containerName="mariadb-database-create" Dec 05 17:49:57 crc kubenswrapper[4961]: E1205 17:49:57.435468 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6" containerName="mariadb-account-create-update" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.435475 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6" containerName="mariadb-account-create-update" Dec 05 17:49:57 crc kubenswrapper[4961]: E1205 17:49:57.435495 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83e4b124-11e4-425b-aae4-3996e61188af" containerName="dnsmasq-dns" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.435501 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="83e4b124-11e4-425b-aae4-3996e61188af" containerName="dnsmasq-dns" Dec 05 17:49:57 crc kubenswrapper[4961]: E1205 17:49:57.435521 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2b416cd-22e0-4619-9fcf-c8329062a46a" containerName="mariadb-database-create" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.435528 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2b416cd-22e0-4619-9fcf-c8329062a46a" containerName="mariadb-database-create" Dec 05 17:49:57 crc kubenswrapper[4961]: E1205 17:49:57.435537 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3cbe3354-8dc0-4907-b2b3-38a260064997" containerName="mariadb-account-create-update" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.435543 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="3cbe3354-8dc0-4907-b2b3-38a260064997" containerName="mariadb-account-create-update" Dec 05 17:49:57 crc kubenswrapper[4961]: E1205 17:49:57.435554 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d90d35e5-699e-4f11-8f43-97c60a4364d9" containerName="mariadb-database-create" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.435560 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="d90d35e5-699e-4f11-8f43-97c60a4364d9" containerName="mariadb-database-create" Dec 05 17:49:57 crc kubenswrapper[4961]: E1205 17:49:57.435570 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83e4b124-11e4-425b-aae4-3996e61188af" containerName="init" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.435577 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="83e4b124-11e4-425b-aae4-3996e61188af" containerName="init" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.435712 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="3cbe3354-8dc0-4907-b2b3-38a260064997" containerName="mariadb-account-create-update" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.435725 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6" containerName="mariadb-account-create-update" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.435732 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="70c92280-ca86-4655-bd02-85c60adfe674" containerName="mariadb-database-create" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.435743 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="d90d35e5-699e-4f11-8f43-97c60a4364d9" containerName="mariadb-database-create" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.435756 4961 memory_manager.go:354] "RemoveStaleState removing 
state" podUID="83e4b124-11e4-425b-aae4-3996e61188af" containerName="dnsmasq-dns" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.435764 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2b416cd-22e0-4619-9fcf-c8329062a46a" containerName="mariadb-database-create" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.436442 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-2mrxc" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.439270 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-scripts" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.439449 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.439566 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"swift-ring-config-data" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.448015 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-2mrxc"] Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.479274 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-create-qn94r" event={"ID":"d90d35e5-699e-4f11-8f43-97c60a4364d9","Type":"ContainerDied","Data":"6dec20895fe14905784c6254552bca198e131dabd6173a315ca22288d3bd8e2e"} Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.479300 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-create-qn94r" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.479309 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6dec20895fe14905784c6254552bca198e131dabd6173a315ca22288d3bd8e2e" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.480445 4961 generic.go:334] "Generic (PLEG): container finished" podID="27a3156a-6997-42ea-888b-ff4a4c2b1988" containerID="179c9efc21fecd93f996c3f35a2a1e198b49545d55959ab1aff0193c90987e87" exitCode=0 Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.480486 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" event={"ID":"27a3156a-6997-42ea-888b-ff4a4c2b1988","Type":"ContainerDied","Data":"179c9efc21fecd93f996c3f35a2a1e198b49545d55959ab1aff0193c90987e87"} Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.480502 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" event={"ID":"27a3156a-6997-42ea-888b-ff4a4c2b1988","Type":"ContainerStarted","Data":"1e94cd75ef87e30eecf906cc11d1b1c7725a1cd0d104f11f277a4e2d1bcd8f4b"} Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.482833 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cf7d-account-create-update-ql94j" event={"ID":"5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6","Type":"ContainerDied","Data":"6892f5132e29d8848e68b44c461416658e2b35f1b011d5cf0518a15169973f43"} Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.482857 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6892f5132e29d8848e68b44c461416658e2b35f1b011d5cf0518a15169973f43" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.483041 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-cf7d-account-create-update-ql94j" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.490597 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-4c80-account-create-update-rkrgv" event={"ID":"3cbe3354-8dc0-4907-b2b3-38a260064997","Type":"ContainerDied","Data":"09b933c39e53b6f5fd87abfc5ad86cebd2b8ce3970ea8e2027675de6eca752dc"} Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.490634 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="09b933c39e53b6f5fd87abfc5ad86cebd2b8ce3970ea8e2027675de6eca752dc" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.490682 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-4c80-account-create-update-rkrgv" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.513230 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-create-c45bb" event={"ID":"f2b416cd-22e0-4619-9fcf-c8329062a46a","Type":"ContainerDied","Data":"5933b7ccf6890800d198a3f0a84bab81d4f91b34cb952b997ed7e2f936ac8422"} Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.513286 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5933b7ccf6890800d198a3f0a84bab81d4f91b34cb952b997ed7e2f936ac8422" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.513544 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-create-c45bb" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.515114 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2b26a944-dad9-45ea-b636-5f2ddaadc80d-dispersionconf\") pod \"swift-ring-rebalance-2mrxc\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " pod="openstack/swift-ring-rebalance-2mrxc" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.515186 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b26a944-dad9-45ea-b636-5f2ddaadc80d-combined-ca-bundle\") pod \"swift-ring-rebalance-2mrxc\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " pod="openstack/swift-ring-rebalance-2mrxc" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.516079 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bc7876d45-2jpmt" event={"ID":"83e4b124-11e4-425b-aae4-3996e61188af","Type":"ContainerDied","Data":"901137673e05cd0d473abfdf5c799230dea8dd4237cf8a69df670f99f648aa25"} Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.516130 4961 scope.go:117] "RemoveContainer" containerID="2d0fa3c69c3780d287657abfb32c09fc58128532def93035d4d9faa2903d83db" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.516268 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bc7876d45-2jpmt" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.518519 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-db-create-9jlnd" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.518679 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-create-9jlnd" event={"ID":"70c92280-ca86-4655-bd02-85c60adfe674","Type":"ContainerDied","Data":"fd763ff12cf3bd2b7820f5f73ab103f5485c1935a0b83b39bc8d8ac49a8ccec0"} Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.518822 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fd763ff12cf3bd2b7820f5f73ab103f5485c1935a0b83b39bc8d8ac49a8ccec0" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.539913 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2b26a944-dad9-45ea-b636-5f2ddaadc80d-swiftconf\") pod \"swift-ring-rebalance-2mrxc\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " pod="openstack/swift-ring-rebalance-2mrxc" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.540019 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2b26a944-dad9-45ea-b636-5f2ddaadc80d-ring-data-devices\") pod \"swift-ring-rebalance-2mrxc\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " pod="openstack/swift-ring-rebalance-2mrxc" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.540166 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2b26a944-dad9-45ea-b636-5f2ddaadc80d-scripts\") pod \"swift-ring-rebalance-2mrxc\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " pod="openstack/swift-ring-rebalance-2mrxc" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.540229 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2b26a944-dad9-45ea-b636-5f2ddaadc80d-etc-swift\") pod \"swift-ring-rebalance-2mrxc\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " pod="openstack/swift-ring-rebalance-2mrxc" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.540265 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2b5sp\" (UniqueName: \"kubernetes.io/projected/2b26a944-dad9-45ea-b636-5f2ddaadc80d-kube-api-access-2b5sp\") pod \"swift-ring-rebalance-2mrxc\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " pod="openstack/swift-ring-rebalance-2mrxc" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.545982 4961 scope.go:117] "RemoveContainer" containerID="5abf322af3208c13a1ccfc8eb5bb766f4a0ecae89086effded1e44a3055125c9" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.585409 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-2jpmt"] Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.600811 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bc7876d45-2jpmt"] Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.641232 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2b26a944-dad9-45ea-b636-5f2ddaadc80d-dispersionconf\") pod \"swift-ring-rebalance-2mrxc\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " pod="openstack/swift-ring-rebalance-2mrxc" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.641342 4961 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b26a944-dad9-45ea-b636-5f2ddaadc80d-combined-ca-bundle\") pod \"swift-ring-rebalance-2mrxc\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " pod="openstack/swift-ring-rebalance-2mrxc" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.641385 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2b26a944-dad9-45ea-b636-5f2ddaadc80d-swiftconf\") pod \"swift-ring-rebalance-2mrxc\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " pod="openstack/swift-ring-rebalance-2mrxc" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.641425 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2b26a944-dad9-45ea-b636-5f2ddaadc80d-ring-data-devices\") pod \"swift-ring-rebalance-2mrxc\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " pod="openstack/swift-ring-rebalance-2mrxc" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.641479 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2b26a944-dad9-45ea-b636-5f2ddaadc80d-scripts\") pod \"swift-ring-rebalance-2mrxc\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " pod="openstack/swift-ring-rebalance-2mrxc" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.641502 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-etc-swift\") pod \"swift-storage-0\" (UID: \"e533098a-ca28-487e-8471-7a426defda37\") " pod="openstack/swift-storage-0" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.641519 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2b26a944-dad9-45ea-b636-5f2ddaadc80d-etc-swift\") pod \"swift-ring-rebalance-2mrxc\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " pod="openstack/swift-ring-rebalance-2mrxc" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.641534 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2b5sp\" (UniqueName: \"kubernetes.io/projected/2b26a944-dad9-45ea-b636-5f2ddaadc80d-kube-api-access-2b5sp\") pod \"swift-ring-rebalance-2mrxc\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " pod="openstack/swift-ring-rebalance-2mrxc" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.642325 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2b26a944-dad9-45ea-b636-5f2ddaadc80d-ring-data-devices\") pod \"swift-ring-rebalance-2mrxc\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " pod="openstack/swift-ring-rebalance-2mrxc" Dec 05 17:49:57 crc kubenswrapper[4961]: E1205 17:49:57.643099 4961 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 17:49:57 crc kubenswrapper[4961]: E1205 17:49:57.643115 4961 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 17:49:57 crc kubenswrapper[4961]: E1205 17:49:57.643149 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-etc-swift 
podName:e533098a-ca28-487e-8471-7a426defda37 nodeName:}" failed. No retries permitted until 2025-12-05 17:49:58.643136516 +0000 UTC m=+1004.704286989 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-etc-swift") pod "swift-storage-0" (UID: "e533098a-ca28-487e-8471-7a426defda37") : configmap "swift-ring-files" not found Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.643562 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2b26a944-dad9-45ea-b636-5f2ddaadc80d-etc-swift\") pod \"swift-ring-rebalance-2mrxc\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " pod="openstack/swift-ring-rebalance-2mrxc" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.643702 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2b26a944-dad9-45ea-b636-5f2ddaadc80d-scripts\") pod \"swift-ring-rebalance-2mrxc\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " pod="openstack/swift-ring-rebalance-2mrxc" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.646663 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2b26a944-dad9-45ea-b636-5f2ddaadc80d-swiftconf\") pod \"swift-ring-rebalance-2mrxc\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " pod="openstack/swift-ring-rebalance-2mrxc" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.646746 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b26a944-dad9-45ea-b636-5f2ddaadc80d-combined-ca-bundle\") pod \"swift-ring-rebalance-2mrxc\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " pod="openstack/swift-ring-rebalance-2mrxc" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.647218 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2b26a944-dad9-45ea-b636-5f2ddaadc80d-dispersionconf\") pod \"swift-ring-rebalance-2mrxc\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " pod="openstack/swift-ring-rebalance-2mrxc" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.664450 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2b5sp\" (UniqueName: \"kubernetes.io/projected/2b26a944-dad9-45ea-b636-5f2ddaadc80d-kube-api-access-2b5sp\") pod \"swift-ring-rebalance-2mrxc\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " pod="openstack/swift-ring-rebalance-2mrxc" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.769763 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/swift-ring-rebalance-2mrxc" Dec 05 17:49:57 crc kubenswrapper[4961]: I1205 17:49:57.884862 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-3f8b-account-create-update-p894d" Dec 05 17:49:58 crc kubenswrapper[4961]: I1205 17:49:58.047695 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c4069b0-74bf-4e21-a280-ae69cfcbcd9a-operator-scripts\") pod \"3c4069b0-74bf-4e21-a280-ae69cfcbcd9a\" (UID: \"3c4069b0-74bf-4e21-a280-ae69cfcbcd9a\") " Dec 05 17:49:58 crc kubenswrapper[4961]: I1205 17:49:58.048060 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gpkrt\" (UniqueName: \"kubernetes.io/projected/3c4069b0-74bf-4e21-a280-ae69cfcbcd9a-kube-api-access-gpkrt\") pod \"3c4069b0-74bf-4e21-a280-ae69cfcbcd9a\" (UID: \"3c4069b0-74bf-4e21-a280-ae69cfcbcd9a\") " Dec 05 17:49:58 crc kubenswrapper[4961]: I1205 17:49:58.050513 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3c4069b0-74bf-4e21-a280-ae69cfcbcd9a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "3c4069b0-74bf-4e21-a280-ae69cfcbcd9a" (UID: "3c4069b0-74bf-4e21-a280-ae69cfcbcd9a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:49:58 crc kubenswrapper[4961]: I1205 17:49:58.056071 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3c4069b0-74bf-4e21-a280-ae69cfcbcd9a-kube-api-access-gpkrt" (OuterVolumeSpecName: "kube-api-access-gpkrt") pod "3c4069b0-74bf-4e21-a280-ae69cfcbcd9a" (UID: "3c4069b0-74bf-4e21-a280-ae69cfcbcd9a"). InnerVolumeSpecName "kube-api-access-gpkrt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:49:58 crc kubenswrapper[4961]: I1205 17:49:58.150997 4961 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/3c4069b0-74bf-4e21-a280-ae69cfcbcd9a-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:58 crc kubenswrapper[4961]: I1205 17:49:58.151032 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gpkrt\" (UniqueName: \"kubernetes.io/projected/3c4069b0-74bf-4e21-a280-ae69cfcbcd9a-kube-api-access-gpkrt\") on node \"crc\" DevicePath \"\"" Dec 05 17:49:58 crc kubenswrapper[4961]: I1205 17:49:58.226321 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-ring-rebalance-2mrxc"] Dec 05 17:49:58 crc kubenswrapper[4961]: W1205 17:49:58.230380 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2b26a944_dad9_45ea_b636_5f2ddaadc80d.slice/crio-ac5b2a1116d127222de7a15421acd6e5d097984f73bc8b68b744f2a57d922200 WatchSource:0}: Error finding container ac5b2a1116d127222de7a15421acd6e5d097984f73bc8b68b744f2a57d922200: Status 404 returned error can't find the container with id ac5b2a1116d127222de7a15421acd6e5d097984f73bc8b68b744f2a57d922200 Dec 05 17:49:58 crc kubenswrapper[4961]: I1205 17:49:58.527346 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" event={"ID":"27a3156a-6997-42ea-888b-ff4a4c2b1988","Type":"ContainerStarted","Data":"761b369dfd98e3c652aab0588518ce5d190a38dcdcc629b8ef251ed12fbb3b31"} Dec 05 17:49:58 crc kubenswrapper[4961]: I1205 17:49:58.527405 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" Dec 05 17:49:58 crc kubenswrapper[4961]: I1205 17:49:58.528918 4961 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openstack/glance-3f8b-account-create-update-p894d" event={"ID":"3c4069b0-74bf-4e21-a280-ae69cfcbcd9a","Type":"ContainerDied","Data":"5a1220d59fa4921479d1f2277fd481baf4c20ca9dae92f862850afd41f75eca4"} Dec 05 17:49:58 crc kubenswrapper[4961]: I1205 17:49:58.528941 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5a1220d59fa4921479d1f2277fd481baf4c20ca9dae92f862850afd41f75eca4" Dec 05 17:49:58 crc kubenswrapper[4961]: I1205 17:49:58.528993 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-3f8b-account-create-update-p894d" Dec 05 17:49:58 crc kubenswrapper[4961]: I1205 17:49:58.531123 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-2mrxc" event={"ID":"2b26a944-dad9-45ea-b636-5f2ddaadc80d","Type":"ContainerStarted","Data":"ac5b2a1116d127222de7a15421acd6e5d097984f73bc8b68b744f2a57d922200"} Dec 05 17:49:58 crc kubenswrapper[4961]: I1205 17:49:58.553256 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" podStartSLOduration=3.553234476 podStartE2EDuration="3.553234476s" podCreationTimestamp="2025-12-05 17:49:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:49:58.552094017 +0000 UTC m=+1004.613244500" watchObservedRunningTime="2025-12-05 17:49:58.553234476 +0000 UTC m=+1004.614384949" Dec 05 17:49:58 crc kubenswrapper[4961]: I1205 17:49:58.658114 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-etc-swift\") pod \"swift-storage-0\" (UID: \"e533098a-ca28-487e-8471-7a426defda37\") " pod="openstack/swift-storage-0" Dec 05 17:49:58 crc kubenswrapper[4961]: E1205 17:49:58.658585 4961 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 17:49:58 crc kubenswrapper[4961]: E1205 17:49:58.658615 4961 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 17:49:58 crc kubenswrapper[4961]: E1205 17:49:58.658664 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-etc-swift podName:e533098a-ca28-487e-8471-7a426defda37 nodeName:}" failed. No retries permitted until 2025-12-05 17:50:00.658645207 +0000 UTC m=+1006.719795790 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-etc-swift") pod "swift-storage-0" (UID: "e533098a-ca28-487e-8471-7a426defda37") : configmap "swift-ring-files" not found Dec 05 17:49:58 crc kubenswrapper[4961]: I1205 17:49:58.874386 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83e4b124-11e4-425b-aae4-3996e61188af" path="/var/lib/kubelet/pods/83e4b124-11e4-425b-aae4-3996e61188af/volumes" Dec 05 17:49:59 crc kubenswrapper[4961]: I1205 17:49:59.520653 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-db-sync-ghqpp"] Dec 05 17:49:59 crc kubenswrapper[4961]: E1205 17:49:59.521301 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3c4069b0-74bf-4e21-a280-ae69cfcbcd9a" containerName="mariadb-account-create-update" Dec 05 17:49:59 crc kubenswrapper[4961]: I1205 17:49:59.521324 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="3c4069b0-74bf-4e21-a280-ae69cfcbcd9a" containerName="mariadb-account-create-update" Dec 05 17:49:59 crc kubenswrapper[4961]: I1205 17:49:59.521552 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="3c4069b0-74bf-4e21-a280-ae69cfcbcd9a" containerName="mariadb-account-create-update" Dec 05 17:49:59 crc kubenswrapper[4961]: I1205 17:49:59.522167 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-ghqpp" Dec 05 17:49:59 crc kubenswrapper[4961]: I1205 17:49:59.524109 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-config-data" Dec 05 17:49:59 crc kubenswrapper[4961]: I1205 17:49:59.534936 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-f5xbg" Dec 05 17:49:59 crc kubenswrapper[4961]: I1205 17:49:59.537949 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-ghqpp"] Dec 05 17:49:59 crc kubenswrapper[4961]: I1205 17:49:59.683503 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ec59b22-f0c6-4de1-a447-59eb40a7c89d-combined-ca-bundle\") pod \"glance-db-sync-ghqpp\" (UID: \"0ec59b22-f0c6-4de1-a447-59eb40a7c89d\") " pod="openstack/glance-db-sync-ghqpp" Dec 05 17:49:59 crc kubenswrapper[4961]: I1205 17:49:59.683563 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ec59b22-f0c6-4de1-a447-59eb40a7c89d-config-data\") pod \"glance-db-sync-ghqpp\" (UID: \"0ec59b22-f0c6-4de1-a447-59eb40a7c89d\") " pod="openstack/glance-db-sync-ghqpp" Dec 05 17:49:59 crc kubenswrapper[4961]: I1205 17:49:59.683637 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0ec59b22-f0c6-4de1-a447-59eb40a7c89d-db-sync-config-data\") pod \"glance-db-sync-ghqpp\" (UID: \"0ec59b22-f0c6-4de1-a447-59eb40a7c89d\") " pod="openstack/glance-db-sync-ghqpp" Dec 05 17:49:59 crc kubenswrapper[4961]: I1205 17:49:59.683672 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnll2\" (UniqueName: \"kubernetes.io/projected/0ec59b22-f0c6-4de1-a447-59eb40a7c89d-kube-api-access-cnll2\") pod \"glance-db-sync-ghqpp\" (UID: \"0ec59b22-f0c6-4de1-a447-59eb40a7c89d\") " pod="openstack/glance-db-sync-ghqpp" Dec 
05 17:49:59 crc kubenswrapper[4961]: I1205 17:49:59.785176 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ec59b22-f0c6-4de1-a447-59eb40a7c89d-config-data\") pod \"glance-db-sync-ghqpp\" (UID: \"0ec59b22-f0c6-4de1-a447-59eb40a7c89d\") " pod="openstack/glance-db-sync-ghqpp" Dec 05 17:49:59 crc kubenswrapper[4961]: I1205 17:49:59.785350 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0ec59b22-f0c6-4de1-a447-59eb40a7c89d-db-sync-config-data\") pod \"glance-db-sync-ghqpp\" (UID: \"0ec59b22-f0c6-4de1-a447-59eb40a7c89d\") " pod="openstack/glance-db-sync-ghqpp" Dec 05 17:49:59 crc kubenswrapper[4961]: I1205 17:49:59.785404 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnll2\" (UniqueName: \"kubernetes.io/projected/0ec59b22-f0c6-4de1-a447-59eb40a7c89d-kube-api-access-cnll2\") pod \"glance-db-sync-ghqpp\" (UID: \"0ec59b22-f0c6-4de1-a447-59eb40a7c89d\") " pod="openstack/glance-db-sync-ghqpp" Dec 05 17:49:59 crc kubenswrapper[4961]: I1205 17:49:59.785541 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ec59b22-f0c6-4de1-a447-59eb40a7c89d-combined-ca-bundle\") pod \"glance-db-sync-ghqpp\" (UID: \"0ec59b22-f0c6-4de1-a447-59eb40a7c89d\") " pod="openstack/glance-db-sync-ghqpp" Dec 05 17:49:59 crc kubenswrapper[4961]: I1205 17:49:59.795573 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0ec59b22-f0c6-4de1-a447-59eb40a7c89d-db-sync-config-data\") pod \"glance-db-sync-ghqpp\" (UID: \"0ec59b22-f0c6-4de1-a447-59eb40a7c89d\") " pod="openstack/glance-db-sync-ghqpp" Dec 05 17:49:59 crc kubenswrapper[4961]: I1205 17:49:59.800680 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ec59b22-f0c6-4de1-a447-59eb40a7c89d-config-data\") pod \"glance-db-sync-ghqpp\" (UID: \"0ec59b22-f0c6-4de1-a447-59eb40a7c89d\") " pod="openstack/glance-db-sync-ghqpp" Dec 05 17:49:59 crc kubenswrapper[4961]: I1205 17:49:59.803378 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ec59b22-f0c6-4de1-a447-59eb40a7c89d-combined-ca-bundle\") pod \"glance-db-sync-ghqpp\" (UID: \"0ec59b22-f0c6-4de1-a447-59eb40a7c89d\") " pod="openstack/glance-db-sync-ghqpp" Dec 05 17:49:59 crc kubenswrapper[4961]: I1205 17:49:59.803835 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnll2\" (UniqueName: \"kubernetes.io/projected/0ec59b22-f0c6-4de1-a447-59eb40a7c89d-kube-api-access-cnll2\") pod \"glance-db-sync-ghqpp\" (UID: \"0ec59b22-f0c6-4de1-a447-59eb40a7c89d\") " pod="openstack/glance-db-sync-ghqpp" Dec 05 17:49:59 crc kubenswrapper[4961]: I1205 17:49:59.850416 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-db-sync-ghqpp" Dec 05 17:50:00 crc kubenswrapper[4961]: I1205 17:50:00.377745 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-db-sync-ghqpp"] Dec 05 17:50:00 crc kubenswrapper[4961]: I1205 17:50:00.555025 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-ghqpp" event={"ID":"0ec59b22-f0c6-4de1-a447-59eb40a7c89d","Type":"ContainerStarted","Data":"320705fb153610b60b8b92073531f367de5c2b21c0e53d556eef64ab5df80098"} Dec 05 17:50:00 crc kubenswrapper[4961]: I1205 17:50:00.703097 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-etc-swift\") pod \"swift-storage-0\" (UID: \"e533098a-ca28-487e-8471-7a426defda37\") " pod="openstack/swift-storage-0" Dec 05 17:50:00 crc kubenswrapper[4961]: E1205 17:50:00.703282 4961 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 17:50:00 crc kubenswrapper[4961]: E1205 17:50:00.703299 4961 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 17:50:00 crc kubenswrapper[4961]: E1205 17:50:00.703343 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-etc-swift podName:e533098a-ca28-487e-8471-7a426defda37 nodeName:}" failed. No retries permitted until 2025-12-05 17:50:04.703328904 +0000 UTC m=+1010.764479377 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-etc-swift") pod "swift-storage-0" (UID: "e533098a-ca28-487e-8471-7a426defda37") : configmap "swift-ring-files" not found Dec 05 17:50:01 crc kubenswrapper[4961]: I1205 17:50:01.925046 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-6bc7876d45-2jpmt" podUID="83e4b124-11e4-425b-aae4-3996e61188af" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.110:5353: i/o timeout" Dec 05 17:50:03 crc kubenswrapper[4961]: I1205 17:50:03.153760 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-northd-0" Dec 05 17:50:03 crc kubenswrapper[4961]: I1205 17:50:03.401840 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/openstack-cell1-galera-0" Dec 05 17:50:03 crc kubenswrapper[4961]: I1205 17:50:03.401886 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/openstack-cell1-galera-0" Dec 05 17:50:03 crc kubenswrapper[4961]: I1205 17:50:03.494513 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/openstack-cell1-galera-0" Dec 05 17:50:03 crc kubenswrapper[4961]: I1205 17:50:03.688121 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/openstack-cell1-galera-0" Dec 05 17:50:04 crc kubenswrapper[4961]: I1205 17:50:04.776174 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-etc-swift\") pod \"swift-storage-0\" (UID: \"e533098a-ca28-487e-8471-7a426defda37\") " pod="openstack/swift-storage-0" Dec 05 17:50:04 crc kubenswrapper[4961]: E1205 17:50:04.776542 4961 projected.go:288] Couldn't get configMap 
openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 17:50:04 crc kubenswrapper[4961]: E1205 17:50:04.776556 4961 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 17:50:04 crc kubenswrapper[4961]: E1205 17:50:04.776598 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-etc-swift podName:e533098a-ca28-487e-8471-7a426defda37 nodeName:}" failed. No retries permitted until 2025-12-05 17:50:12.776583642 +0000 UTC m=+1018.837734115 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-etc-swift") pod "swift-storage-0" (UID: "e533098a-ca28-487e-8471-7a426defda37") : configmap "swift-ring-files" not found Dec 05 17:50:06 crc kubenswrapper[4961]: I1205 17:50:06.243093 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" Dec 05 17:50:06 crc kubenswrapper[4961]: I1205 17:50:06.399428 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-qggrr"] Dec 05 17:50:06 crc kubenswrapper[4961]: I1205 17:50:06.399806 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-8554648995-qggrr" podUID="50c2c6e1-8c4d-4464-a362-3e99192aa795" containerName="dnsmasq-dns" containerID="cri-o://3a6acdcab74ac97579b623373377ba5aa4e939301b5b8d1418433de339074323" gracePeriod=10 Dec 05 17:50:07 crc kubenswrapper[4961]: I1205 17:50:07.306993 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-8554648995-qggrr" podUID="50c2c6e1-8c4d-4464-a362-3e99192aa795" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.111:5353: connect: connection refused" Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.003600 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ovn-controller-ldph7" podUID="6b6b1e99-e081-4c93-8fe8-c693eb7a0205" containerName="ovn-controller" probeResult="failure" output=< Dec 05 17:50:10 crc kubenswrapper[4961]: ERROR - ovn-controller connection status is 'not connected', expecting 'connected' status Dec 05 17:50:10 crc kubenswrapper[4961]: > Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.049603 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-c9ff2" Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.049658 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ovs-c9ff2" Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.259591 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-controller-ldph7-config-pl2pp"] Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.261721 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ldph7-config-pl2pp" Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.267299 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-extra-scripts" Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.271391 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ldph7-config-pl2pp"] Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.401009 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d3aca988-775e-408c-80f8-61a5ace9d645-var-log-ovn\") pod \"ovn-controller-ldph7-config-pl2pp\" (UID: \"d3aca988-775e-408c-80f8-61a5ace9d645\") " pod="openstack/ovn-controller-ldph7-config-pl2pp" Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.401065 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tswct\" (UniqueName: \"kubernetes.io/projected/d3aca988-775e-408c-80f8-61a5ace9d645-kube-api-access-tswct\") pod \"ovn-controller-ldph7-config-pl2pp\" (UID: \"d3aca988-775e-408c-80f8-61a5ace9d645\") " pod="openstack/ovn-controller-ldph7-config-pl2pp" Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.401227 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d3aca988-775e-408c-80f8-61a5ace9d645-var-run-ovn\") pod \"ovn-controller-ldph7-config-pl2pp\" (UID: \"d3aca988-775e-408c-80f8-61a5ace9d645\") " pod="openstack/ovn-controller-ldph7-config-pl2pp" Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.401276 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d3aca988-775e-408c-80f8-61a5ace9d645-scripts\") pod \"ovn-controller-ldph7-config-pl2pp\" (UID: \"d3aca988-775e-408c-80f8-61a5ace9d645\") " pod="openstack/ovn-controller-ldph7-config-pl2pp" Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.401320 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d3aca988-775e-408c-80f8-61a5ace9d645-var-run\") pod \"ovn-controller-ldph7-config-pl2pp\" (UID: \"d3aca988-775e-408c-80f8-61a5ace9d645\") " pod="openstack/ovn-controller-ldph7-config-pl2pp" Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.401373 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d3aca988-775e-408c-80f8-61a5ace9d645-additional-scripts\") pod \"ovn-controller-ldph7-config-pl2pp\" (UID: \"d3aca988-775e-408c-80f8-61a5ace9d645\") " pod="openstack/ovn-controller-ldph7-config-pl2pp" Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.503130 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d3aca988-775e-408c-80f8-61a5ace9d645-var-run-ovn\") pod \"ovn-controller-ldph7-config-pl2pp\" (UID: \"d3aca988-775e-408c-80f8-61a5ace9d645\") " pod="openstack/ovn-controller-ldph7-config-pl2pp" Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.503185 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d3aca988-775e-408c-80f8-61a5ace9d645-scripts\") pod 
\"ovn-controller-ldph7-config-pl2pp\" (UID: \"d3aca988-775e-408c-80f8-61a5ace9d645\") " pod="openstack/ovn-controller-ldph7-config-pl2pp" Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.503223 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d3aca988-775e-408c-80f8-61a5ace9d645-var-run\") pod \"ovn-controller-ldph7-config-pl2pp\" (UID: \"d3aca988-775e-408c-80f8-61a5ace9d645\") " pod="openstack/ovn-controller-ldph7-config-pl2pp" Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.503272 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d3aca988-775e-408c-80f8-61a5ace9d645-additional-scripts\") pod \"ovn-controller-ldph7-config-pl2pp\" (UID: \"d3aca988-775e-408c-80f8-61a5ace9d645\") " pod="openstack/ovn-controller-ldph7-config-pl2pp" Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.503354 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d3aca988-775e-408c-80f8-61a5ace9d645-var-log-ovn\") pod \"ovn-controller-ldph7-config-pl2pp\" (UID: \"d3aca988-775e-408c-80f8-61a5ace9d645\") " pod="openstack/ovn-controller-ldph7-config-pl2pp" Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.503374 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tswct\" (UniqueName: \"kubernetes.io/projected/d3aca988-775e-408c-80f8-61a5ace9d645-kube-api-access-tswct\") pod \"ovn-controller-ldph7-config-pl2pp\" (UID: \"d3aca988-775e-408c-80f8-61a5ace9d645\") " pod="openstack/ovn-controller-ldph7-config-pl2pp" Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.503459 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d3aca988-775e-408c-80f8-61a5ace9d645-var-run-ovn\") pod \"ovn-controller-ldph7-config-pl2pp\" (UID: \"d3aca988-775e-408c-80f8-61a5ace9d645\") " pod="openstack/ovn-controller-ldph7-config-pl2pp" Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.503513 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d3aca988-775e-408c-80f8-61a5ace9d645-var-run\") pod \"ovn-controller-ldph7-config-pl2pp\" (UID: \"d3aca988-775e-408c-80f8-61a5ace9d645\") " pod="openstack/ovn-controller-ldph7-config-pl2pp" Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.503558 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d3aca988-775e-408c-80f8-61a5ace9d645-var-log-ovn\") pod \"ovn-controller-ldph7-config-pl2pp\" (UID: \"d3aca988-775e-408c-80f8-61a5ace9d645\") " pod="openstack/ovn-controller-ldph7-config-pl2pp" Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.504560 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d3aca988-775e-408c-80f8-61a5ace9d645-additional-scripts\") pod \"ovn-controller-ldph7-config-pl2pp\" (UID: \"d3aca988-775e-408c-80f8-61a5ace9d645\") " pod="openstack/ovn-controller-ldph7-config-pl2pp" Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.505568 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d3aca988-775e-408c-80f8-61a5ace9d645-scripts\") pod 
\"ovn-controller-ldph7-config-pl2pp\" (UID: \"d3aca988-775e-408c-80f8-61a5ace9d645\") " pod="openstack/ovn-controller-ldph7-config-pl2pp" Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.528439 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tswct\" (UniqueName: \"kubernetes.io/projected/d3aca988-775e-408c-80f8-61a5ace9d645-kube-api-access-tswct\") pod \"ovn-controller-ldph7-config-pl2pp\" (UID: \"d3aca988-775e-408c-80f8-61a5ace9d645\") " pod="openstack/ovn-controller-ldph7-config-pl2pp" Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.589291 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ldph7-config-pl2pp" Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.697217 4961 generic.go:334] "Generic (PLEG): container finished" podID="50c2c6e1-8c4d-4464-a362-3e99192aa795" containerID="3a6acdcab74ac97579b623373377ba5aa4e939301b5b8d1418433de339074323" exitCode=0 Dec 05 17:50:10 crc kubenswrapper[4961]: I1205 17:50:10.697267 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-qggrr" event={"ID":"50c2c6e1-8c4d-4464-a362-3e99192aa795","Type":"ContainerDied","Data":"3a6acdcab74ac97579b623373377ba5aa4e939301b5b8d1418433de339074323"} Dec 05 17:50:12 crc kubenswrapper[4961]: I1205 17:50:12.306617 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-8554648995-qggrr" podUID="50c2c6e1-8c4d-4464-a362-3e99192aa795" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.111:5353: connect: connection refused" Dec 05 17:50:12 crc kubenswrapper[4961]: I1205 17:50:12.843583 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-etc-swift\") pod \"swift-storage-0\" (UID: \"e533098a-ca28-487e-8471-7a426defda37\") " pod="openstack/swift-storage-0" Dec 05 17:50:12 crc kubenswrapper[4961]: E1205 17:50:12.843907 4961 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 17:50:12 crc kubenswrapper[4961]: E1205 17:50:12.843951 4961 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 17:50:12 crc kubenswrapper[4961]: E1205 17:50:12.844040 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-etc-swift podName:e533098a-ca28-487e-8471-7a426defda37 nodeName:}" failed. No retries permitted until 2025-12-05 17:50:28.844015003 +0000 UTC m=+1034.905165506 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-etc-swift") pod "swift-storage-0" (UID: "e533098a-ca28-487e-8471-7a426defda37") : configmap "swift-ring-files" not found Dec 05 17:50:14 crc kubenswrapper[4961]: I1205 17:50:14.042128 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-controller-ldph7-config-pl2pp"] Dec 05 17:50:14 crc kubenswrapper[4961]: W1205 17:50:14.046378 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podd3aca988_775e_408c_80f8_61a5ace9d645.slice/crio-6a8ee24b9487cbb52df8188bea71b0cfd799396df40e3115461b6c47c2be5359 WatchSource:0}: Error finding container 6a8ee24b9487cbb52df8188bea71b0cfd799396df40e3115461b6c47c2be5359: Status 404 returned error can't find the container with id 6a8ee24b9487cbb52df8188bea71b0cfd799396df40e3115461b6c47c2be5359 Dec 05 17:50:14 crc kubenswrapper[4961]: E1205 17:50:14.762709 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified" Dec 05 17:50:14 crc kubenswrapper[4961]: E1205 17:50:14.763255 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:swift-ring-rebalance,Image:quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified,Command:[/usr/local/bin/swift-ring-tool all],Args:[],WorkingDir:/etc/swift,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CM_NAME,Value:swift-ring-files,ValueFrom:nil,},EnvVar{Name:NAMESPACE,Value:openstack,ValueFrom:nil,},EnvVar{Name:OWNER_APIVERSION,Value:swift.openstack.org/v1beta1,ValueFrom:nil,},EnvVar{Name:OWNER_KIND,Value:SwiftRing,ValueFrom:nil,},EnvVar{Name:OWNER_NAME,Value:swift-ring,ValueFrom:nil,},EnvVar{Name:OWNER_UID,Value:54aa2dc0-3ddc-4a39-ab36-e9c390bacee5,ValueFrom:nil,},EnvVar{Name:SWIFT_MIN_PART_HOURS,Value:1,ValueFrom:nil,},EnvVar{Name:SWIFT_PART_POWER,Value:10,ValueFrom:nil,},EnvVar{Name:SWIFT_REPLICAS,Value:1,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/swift-ring-tool,SubPath:swift-ring-tool,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:swiftconf,ReadOnly:true,MountPath:/etc/swift/swift.conf,SubPath:swift.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:etc-swift,ReadOnly:false,MountPath:/etc/swift,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ring-data-devices,ReadOnly:true,MountPath:/var/lib/config-data/ring-devices,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:dispersionconf,ReadOnly:true,MountPath:/etc/swift/dispersion.conf,SubPath:dispersion.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-2b5sp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityC
ontext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42445,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod swift-ring-rebalance-2mrxc_openstack(2b26a944-dad9-45ea-b636-5f2ddaadc80d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:50:14 crc kubenswrapper[4961]: E1205 17:50:14.764745 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"swift-ring-rebalance\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/swift-ring-rebalance-2mrxc" podUID="2b26a944-dad9-45ea-b636-5f2ddaadc80d" Dec 05 17:50:14 crc kubenswrapper[4961]: I1205 17:50:14.780450 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ldph7-config-pl2pp" event={"ID":"d3aca988-775e-408c-80f8-61a5ace9d645","Type":"ContainerStarted","Data":"0c46fd593cd74d7c79826f40150b74459107d2fa2b260352f72626e5aace9e13"} Dec 05 17:50:14 crc kubenswrapper[4961]: I1205 17:50:14.780527 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ldph7-config-pl2pp" event={"ID":"d3aca988-775e-408c-80f8-61a5ace9d645","Type":"ContainerStarted","Data":"6a8ee24b9487cbb52df8188bea71b0cfd799396df40e3115461b6c47c2be5359"} Dec 05 17:50:14 crc kubenswrapper[4961]: I1205 17:50:14.799434 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ovn-controller-ldph7-config-pl2pp" podStartSLOduration=4.799410674 podStartE2EDuration="4.799410674s" podCreationTimestamp="2025-12-05 17:50:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:50:14.797960978 +0000 UTC m=+1020.859111451" watchObservedRunningTime="2025-12-05 17:50:14.799410674 +0000 UTC m=+1020.860561147" Dec 05 17:50:15 crc kubenswrapper[4961]: I1205 17:50:15.000820 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ovn-controller-ldph7" Dec 05 17:50:15 crc kubenswrapper[4961]: I1205 17:50:15.790982 4961 generic.go:334] "Generic (PLEG): container finished" podID="d3aca988-775e-408c-80f8-61a5ace9d645" containerID="0c46fd593cd74d7c79826f40150b74459107d2fa2b260352f72626e5aace9e13" exitCode=0 Dec 05 17:50:15 crc kubenswrapper[4961]: I1205 17:50:15.791033 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ldph7-config-pl2pp" event={"ID":"d3aca988-775e-408c-80f8-61a5ace9d645","Type":"ContainerDied","Data":"0c46fd593cd74d7c79826f40150b74459107d2fa2b260352f72626e5aace9e13"} Dec 05 17:50:15 crc kubenswrapper[4961]: E1205 17:50:15.793555 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"swift-ring-rebalance\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-swift-proxy-server:current-podified\\\"\"" pod="openstack/swift-ring-rebalance-2mrxc" podUID="2b26a944-dad9-45ea-b636-5f2ddaadc80d" Dec 05 17:50:18 crc kubenswrapper[4961]: I1205 17:50:18.822711 4961 generic.go:334] "Generic (PLEG): container finished" 
podID="346da897-3e71-4d6f-b17d-fe5f905dd705" containerID="c802bc16e5515abf77c97c7accaacad9ade519a54f952519cddc8c8437ae18cf" exitCode=0 Dec 05 17:50:18 crc kubenswrapper[4961]: I1205 17:50:18.822852 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"346da897-3e71-4d6f-b17d-fe5f905dd705","Type":"ContainerDied","Data":"c802bc16e5515abf77c97c7accaacad9ade519a54f952519cddc8c8437ae18cf"} Dec 05 17:50:21 crc kubenswrapper[4961]: I1205 17:50:21.849351 4961 generic.go:334] "Generic (PLEG): container finished" podID="494ff2b1-6bb3-4c8a-be81-02fe6f884caa" containerID="d993e9fbe949298b15d84342b1b58595a1705da6e498b29b5a95e7035465ac43" exitCode=0 Dec 05 17:50:21 crc kubenswrapper[4961]: I1205 17:50:21.849468 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"494ff2b1-6bb3-4c8a-be81-02fe6f884caa","Type":"ContainerDied","Data":"d993e9fbe949298b15d84342b1b58595a1705da6e498b29b5a95e7035465ac43"} Dec 05 17:50:22 crc kubenswrapper[4961]: I1205 17:50:22.307307 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-8554648995-qggrr" podUID="50c2c6e1-8c4d-4464-a362-3e99192aa795" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.111:5353: i/o timeout" Dec 05 17:50:22 crc kubenswrapper[4961]: I1205 17:50:22.307594 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8554648995-qggrr" Dec 05 17:50:22 crc kubenswrapper[4961]: E1205 17:50:22.916060 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-glance-api:current-podified" Dec 05 17:50:22 crc kubenswrapper[4961]: E1205 17:50:22.916628 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:glance-db-sync,Image:quay.io/podified-antelope-centos9/openstack-glance-api:current-podified,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/glance/glance.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-cnll2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42415,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42415,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod glance-db-sync-ghqpp_openstack(0ec59b22-f0c6-4de1-a447-59eb40a7c89d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:50:22 crc kubenswrapper[4961]: E1205 17:50:22.918503 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/glance-db-sync-ghqpp" podUID="0ec59b22-f0c6-4de1-a447-59eb40a7c89d" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.010561 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-controller-ldph7-config-pl2pp" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.029106 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tswct\" (UniqueName: \"kubernetes.io/projected/d3aca988-775e-408c-80f8-61a5ace9d645-kube-api-access-tswct\") pod \"d3aca988-775e-408c-80f8-61a5ace9d645\" (UID: \"d3aca988-775e-408c-80f8-61a5ace9d645\") " Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.029254 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d3aca988-775e-408c-80f8-61a5ace9d645-additional-scripts\") pod \"d3aca988-775e-408c-80f8-61a5ace9d645\" (UID: \"d3aca988-775e-408c-80f8-61a5ace9d645\") " Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.029288 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d3aca988-775e-408c-80f8-61a5ace9d645-var-log-ovn\") pod \"d3aca988-775e-408c-80f8-61a5ace9d645\" (UID: \"d3aca988-775e-408c-80f8-61a5ace9d645\") " Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.029308 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d3aca988-775e-408c-80f8-61a5ace9d645-var-run-ovn\") pod \"d3aca988-775e-408c-80f8-61a5ace9d645\" (UID: \"d3aca988-775e-408c-80f8-61a5ace9d645\") " Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.029326 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d3aca988-775e-408c-80f8-61a5ace9d645-var-run\") pod \"d3aca988-775e-408c-80f8-61a5ace9d645\" (UID: \"d3aca988-775e-408c-80f8-61a5ace9d645\") " Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.029373 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d3aca988-775e-408c-80f8-61a5ace9d645-scripts\") pod \"d3aca988-775e-408c-80f8-61a5ace9d645\" (UID: \"d3aca988-775e-408c-80f8-61a5ace9d645\") " Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.029449 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d3aca988-775e-408c-80f8-61a5ace9d645-var-log-ovn" (OuterVolumeSpecName: "var-log-ovn") pod "d3aca988-775e-408c-80f8-61a5ace9d645" (UID: "d3aca988-775e-408c-80f8-61a5ace9d645"). InnerVolumeSpecName "var-log-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.029732 4961 reconciler_common.go:293] "Volume detached for volume \"var-log-ovn\" (UniqueName: \"kubernetes.io/host-path/d3aca988-775e-408c-80f8-61a5ace9d645-var-log-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.030599 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3aca988-775e-408c-80f8-61a5ace9d645-additional-scripts" (OuterVolumeSpecName: "additional-scripts") pod "d3aca988-775e-408c-80f8-61a5ace9d645" (UID: "d3aca988-775e-408c-80f8-61a5ace9d645"). InnerVolumeSpecName "additional-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.030650 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d3aca988-775e-408c-80f8-61a5ace9d645-var-run-ovn" (OuterVolumeSpecName: "var-run-ovn") pod "d3aca988-775e-408c-80f8-61a5ace9d645" (UID: "d3aca988-775e-408c-80f8-61a5ace9d645"). InnerVolumeSpecName "var-run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.030681 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/d3aca988-775e-408c-80f8-61a5ace9d645-var-run" (OuterVolumeSpecName: "var-run") pod "d3aca988-775e-408c-80f8-61a5ace9d645" (UID: "d3aca988-775e-408c-80f8-61a5ace9d645"). InnerVolumeSpecName "var-run". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.031066 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d3aca988-775e-408c-80f8-61a5ace9d645-scripts" (OuterVolumeSpecName: "scripts") pod "d3aca988-775e-408c-80f8-61a5ace9d645" (UID: "d3aca988-775e-408c-80f8-61a5ace9d645"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.041341 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3aca988-775e-408c-80f8-61a5ace9d645-kube-api-access-tswct" (OuterVolumeSpecName: "kube-api-access-tswct") pod "d3aca988-775e-408c-80f8-61a5ace9d645" (UID: "d3aca988-775e-408c-80f8-61a5ace9d645"). InnerVolumeSpecName "kube-api-access-tswct". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.043180 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-qggrr" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.130507 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50c2c6e1-8c4d-4464-a362-3e99192aa795-config\") pod \"50c2c6e1-8c4d-4464-a362-3e99192aa795\" (UID: \"50c2c6e1-8c4d-4464-a362-3e99192aa795\") " Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.130681 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4s2fv\" (UniqueName: \"kubernetes.io/projected/50c2c6e1-8c4d-4464-a362-3e99192aa795-kube-api-access-4s2fv\") pod \"50c2c6e1-8c4d-4464-a362-3e99192aa795\" (UID: \"50c2c6e1-8c4d-4464-a362-3e99192aa795\") " Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.130711 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/50c2c6e1-8c4d-4464-a362-3e99192aa795-ovsdbserver-nb\") pod \"50c2c6e1-8c4d-4464-a362-3e99192aa795\" (UID: \"50c2c6e1-8c4d-4464-a362-3e99192aa795\") " Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.130811 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/50c2c6e1-8c4d-4464-a362-3e99192aa795-dns-svc\") pod \"50c2c6e1-8c4d-4464-a362-3e99192aa795\" (UID: \"50c2c6e1-8c4d-4464-a362-3e99192aa795\") " Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.130831 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/50c2c6e1-8c4d-4464-a362-3e99192aa795-ovsdbserver-sb\") pod \"50c2c6e1-8c4d-4464-a362-3e99192aa795\" (UID: \"50c2c6e1-8c4d-4464-a362-3e99192aa795\") " Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.131217 4961 reconciler_common.go:293] "Volume detached for volume \"var-run-ovn\" (UniqueName: \"kubernetes.io/host-path/d3aca988-775e-408c-80f8-61a5ace9d645-var-run-ovn\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.131229 4961 reconciler_common.go:293] "Volume detached for volume \"var-run\" (UniqueName: \"kubernetes.io/host-path/d3aca988-775e-408c-80f8-61a5ace9d645-var-run\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.131237 4961 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/d3aca988-775e-408c-80f8-61a5ace9d645-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.131246 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tswct\" (UniqueName: \"kubernetes.io/projected/d3aca988-775e-408c-80f8-61a5ace9d645-kube-api-access-tswct\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.131259 4961 reconciler_common.go:293] "Volume detached for volume \"additional-scripts\" (UniqueName: \"kubernetes.io/configmap/d3aca988-775e-408c-80f8-61a5ace9d645-additional-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.140108 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/50c2c6e1-8c4d-4464-a362-3e99192aa795-kube-api-access-4s2fv" (OuterVolumeSpecName: "kube-api-access-4s2fv") pod "50c2c6e1-8c4d-4464-a362-3e99192aa795" (UID: "50c2c6e1-8c4d-4464-a362-3e99192aa795"). 
InnerVolumeSpecName "kube-api-access-4s2fv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.188530 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50c2c6e1-8c4d-4464-a362-3e99192aa795-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "50c2c6e1-8c4d-4464-a362-3e99192aa795" (UID: "50c2c6e1-8c4d-4464-a362-3e99192aa795"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.191429 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50c2c6e1-8c4d-4464-a362-3e99192aa795-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "50c2c6e1-8c4d-4464-a362-3e99192aa795" (UID: "50c2c6e1-8c4d-4464-a362-3e99192aa795"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.194655 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50c2c6e1-8c4d-4464-a362-3e99192aa795-config" (OuterVolumeSpecName: "config") pod "50c2c6e1-8c4d-4464-a362-3e99192aa795" (UID: "50c2c6e1-8c4d-4464-a362-3e99192aa795"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.197652 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/50c2c6e1-8c4d-4464-a362-3e99192aa795-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "50c2c6e1-8c4d-4464-a362-3e99192aa795" (UID: "50c2c6e1-8c4d-4464-a362-3e99192aa795"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.232450 4961 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/50c2c6e1-8c4d-4464-a362-3e99192aa795-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.232499 4961 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/50c2c6e1-8c4d-4464-a362-3e99192aa795-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.232511 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/50c2c6e1-8c4d-4464-a362-3e99192aa795-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.232521 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4s2fv\" (UniqueName: \"kubernetes.io/projected/50c2c6e1-8c4d-4464-a362-3e99192aa795-kube-api-access-4s2fv\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.232531 4961 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/50c2c6e1-8c4d-4464-a362-3e99192aa795-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.874977 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"346da897-3e71-4d6f-b17d-fe5f905dd705","Type":"ContainerStarted","Data":"da4f23365e5ecafdc73b5143c8bd174d0469c2a2473dc2c85cccd597fef41bdc"} Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.876098 4961 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.877538 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-controller-ldph7-config-pl2pp" event={"ID":"d3aca988-775e-408c-80f8-61a5ace9d645","Type":"ContainerDied","Data":"6a8ee24b9487cbb52df8188bea71b0cfd799396df40e3115461b6c47c2be5359"} Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.877728 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-controller-ldph7-config-pl2pp" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.877745 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6a8ee24b9487cbb52df8188bea71b0cfd799396df40e3115461b6c47c2be5359" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.880071 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8554648995-qggrr" event={"ID":"50c2c6e1-8c4d-4464-a362-3e99192aa795","Type":"ContainerDied","Data":"8789cc6f4b7a82dc930e7e96550002a9a8cc7341afa26c8385621fd49b58722e"} Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.880360 4961 scope.go:117] "RemoveContainer" containerID="3a6acdcab74ac97579b623373377ba5aa4e939301b5b8d1418433de339074323" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.880837 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8554648995-qggrr" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.900103 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"494ff2b1-6bb3-4c8a-be81-02fe6f884caa","Type":"ContainerStarted","Data":"308116382cb27e818f070b7b056141397be7f285093c2ad4ff20f0102e6836e5"} Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.900424 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:50:23 crc kubenswrapper[4961]: E1205 17:50:23.901826 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"glance-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-glance-api:current-podified\\\"\"" pod="openstack/glance-db-sync-ghqpp" podUID="0ec59b22-f0c6-4de1-a447-59eb40a7c89d" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.918564 4961 scope.go:117] "RemoveContainer" containerID="aa5577c9449f6c2cbc0c0bf3eb9fd27ed9331244ae22e970acbee971b1db4509" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.925968 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=43.595253489 podStartE2EDuration="1m25.925942522s" podCreationTimestamp="2025-12-05 17:48:58 +0000 UTC" firstStartedPulling="2025-12-05 17:49:02.005925362 +0000 UTC m=+948.067075835" lastFinishedPulling="2025-12-05 17:49:44.336614395 +0000 UTC m=+990.397764868" observedRunningTime="2025-12-05 17:50:23.915950438 +0000 UTC m=+1029.977100961" watchObservedRunningTime="2025-12-05 17:50:23.925942522 +0000 UTC m=+1029.987093005" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.948753 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=-9223371951.90605 podStartE2EDuration="1m24.948725421s" podCreationTimestamp="2025-12-05 17:48:59 +0000 UTC" firstStartedPulling="2025-12-05 17:49:02.005941452 +0000 UTC m=+948.067091935" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:50:23.942567549 +0000 UTC m=+1030.003718022" watchObservedRunningTime="2025-12-05 17:50:23.948725421 +0000 UTC m=+1030.009875914" Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.978448 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-8554648995-qggrr"] Dec 05 17:50:23 crc kubenswrapper[4961]: I1205 17:50:23.988558 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-8554648995-qggrr"] Dec 05 17:50:24 crc kubenswrapper[4961]: I1205 17:50:24.132180 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ovn-controller-ldph7-config-pl2pp"] Dec 05 17:50:24 crc kubenswrapper[4961]: I1205 17:50:24.140055 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ovn-controller-ldph7-config-pl2pp"] Dec 05 17:50:24 crc kubenswrapper[4961]: I1205 17:50:24.877934 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="50c2c6e1-8c4d-4464-a362-3e99192aa795" path="/var/lib/kubelet/pods/50c2c6e1-8c4d-4464-a362-3e99192aa795/volumes" Dec 05 17:50:24 crc kubenswrapper[4961]: I1205 17:50:24.878702 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3aca988-775e-408c-80f8-61a5ace9d645" path="/var/lib/kubelet/pods/d3aca988-775e-408c-80f8-61a5ace9d645/volumes" Dec 05 17:50:27 crc kubenswrapper[4961]: I1205 17:50:27.245746 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:50:27 crc kubenswrapper[4961]: I1205 17:50:27.246095 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:50:27 crc kubenswrapper[4961]: I1205 17:50:27.246135 4961 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" Dec 05 17:50:27 crc kubenswrapper[4961]: I1205 17:50:27.246826 4961 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7d9ed5247bcd400783c6d7e2c5bad6038c840849b5210eed14fdd0422b90e593"} pod="openshift-machine-config-operator/machine-config-daemon-4vc27" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 17:50:27 crc kubenswrapper[4961]: I1205 17:50:27.246884 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" containerID="cri-o://7d9ed5247bcd400783c6d7e2c5bad6038c840849b5210eed14fdd0422b90e593" gracePeriod=600 Dec 05 17:50:27 crc kubenswrapper[4961]: I1205 17:50:27.308344 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-8554648995-qggrr" podUID="50c2c6e1-8c4d-4464-a362-3e99192aa795" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.111:5353: i/o timeout" Dec 05 17:50:27 crc kubenswrapper[4961]: I1205 17:50:27.936737 4961 
generic.go:334] "Generic (PLEG): container finished" podID="c048c267-061b-479b-9d63-b3aee093d9f6" containerID="7d9ed5247bcd400783c6d7e2c5bad6038c840849b5210eed14fdd0422b90e593" exitCode=0 Dec 05 17:50:27 crc kubenswrapper[4961]: I1205 17:50:27.936808 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerDied","Data":"7d9ed5247bcd400783c6d7e2c5bad6038c840849b5210eed14fdd0422b90e593"} Dec 05 17:50:27 crc kubenswrapper[4961]: I1205 17:50:27.937382 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerStarted","Data":"ea0a333281fac885bbddc52760bdc853687db225a39907ca95a10b1226157c7e"} Dec 05 17:50:27 crc kubenswrapper[4961]: I1205 17:50:27.937407 4961 scope.go:117] "RemoveContainer" containerID="8298295ec17dfc63ef58bd3072d80116c1dcc72b110ce7d9f28c0734b811e20a" Dec 05 17:50:28 crc kubenswrapper[4961]: I1205 17:50:28.849761 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-etc-swift\") pod \"swift-storage-0\" (UID: \"e533098a-ca28-487e-8471-7a426defda37\") " pod="openstack/swift-storage-0" Dec 05 17:50:28 crc kubenswrapper[4961]: E1205 17:50:28.849917 4961 projected.go:288] Couldn't get configMap openstack/swift-ring-files: configmap "swift-ring-files" not found Dec 05 17:50:28 crc kubenswrapper[4961]: E1205 17:50:28.849941 4961 projected.go:194] Error preparing data for projected volume etc-swift for pod openstack/swift-storage-0: configmap "swift-ring-files" not found Dec 05 17:50:28 crc kubenswrapper[4961]: E1205 17:50:28.850005 4961 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-etc-swift podName:e533098a-ca28-487e-8471-7a426defda37 nodeName:}" failed. No retries permitted until 2025-12-05 17:51:00.849986807 +0000 UTC m=+1066.911137280 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "etc-swift" (UniqueName: "kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-etc-swift") pod "swift-storage-0" (UID: "e533098a-ca28-487e-8471-7a426defda37") : configmap "swift-ring-files" not found Dec 05 17:50:30 crc kubenswrapper[4961]: I1205 17:50:30.967294 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-2mrxc" event={"ID":"2b26a944-dad9-45ea-b636-5f2ddaadc80d","Type":"ContainerStarted","Data":"550dccdd6e207f6beb90fe639060c64f36976533ef5bdbced49367bc556e9957"} Dec 05 17:50:30 crc kubenswrapper[4961]: I1205 17:50:30.992130 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-ring-rebalance-2mrxc" podStartSLOduration=1.908352362 podStartE2EDuration="33.992112587s" podCreationTimestamp="2025-12-05 17:49:57 +0000 UTC" firstStartedPulling="2025-12-05 17:49:58.232797968 +0000 UTC m=+1004.293948441" lastFinishedPulling="2025-12-05 17:50:30.316558193 +0000 UTC m=+1036.377708666" observedRunningTime="2025-12-05 17:50:30.986987762 +0000 UTC m=+1037.048138265" watchObservedRunningTime="2025-12-05 17:50:30.992112587 +0000 UTC m=+1037.053263060" Dec 05 17:50:37 crc kubenswrapper[4961]: I1205 17:50:37.026673 4961 generic.go:334] "Generic (PLEG): container finished" podID="2b26a944-dad9-45ea-b636-5f2ddaadc80d" containerID="550dccdd6e207f6beb90fe639060c64f36976533ef5bdbced49367bc556e9957" exitCode=0 Dec 05 17:50:37 crc kubenswrapper[4961]: I1205 17:50:37.026877 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-2mrxc" event={"ID":"2b26a944-dad9-45ea-b636-5f2ddaadc80d","Type":"ContainerDied","Data":"550dccdd6e207f6beb90fe639060c64f36976533ef5bdbced49367bc556e9957"} Dec 05 17:50:37 crc kubenswrapper[4961]: I1205 17:50:37.030228 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-ghqpp" event={"ID":"0ec59b22-f0c6-4de1-a447-59eb40a7c89d","Type":"ContainerStarted","Data":"77b63205c0b9058f2486caf506c53645a7e591ccfeadb412cdd2905f65c6dcd5"} Dec 05 17:50:37 crc kubenswrapper[4961]: I1205 17:50:37.067990 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-db-sync-ghqpp" podStartSLOduration=2.152631842 podStartE2EDuration="38.067971942s" podCreationTimestamp="2025-12-05 17:49:59 +0000 UTC" firstStartedPulling="2025-12-05 17:50:00.40715929 +0000 UTC m=+1006.468309763" lastFinishedPulling="2025-12-05 17:50:36.32249939 +0000 UTC m=+1042.383649863" observedRunningTime="2025-12-05 17:50:37.05935925 +0000 UTC m=+1043.120509733" watchObservedRunningTime="2025-12-05 17:50:37.067971942 +0000 UTC m=+1043.129122415" Dec 05 17:50:38 crc kubenswrapper[4961]: I1205 17:50:38.367668 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-2mrxc" Dec 05 17:50:38 crc kubenswrapper[4961]: I1205 17:50:38.515616 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b26a944-dad9-45ea-b636-5f2ddaadc80d-combined-ca-bundle\") pod \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " Dec 05 17:50:38 crc kubenswrapper[4961]: I1205 17:50:38.515683 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2b5sp\" (UniqueName: \"kubernetes.io/projected/2b26a944-dad9-45ea-b636-5f2ddaadc80d-kube-api-access-2b5sp\") pod \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " Dec 05 17:50:38 crc kubenswrapper[4961]: I1205 17:50:38.515816 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2b26a944-dad9-45ea-b636-5f2ddaadc80d-swiftconf\") pod \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " Dec 05 17:50:38 crc kubenswrapper[4961]: I1205 17:50:38.515850 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2b26a944-dad9-45ea-b636-5f2ddaadc80d-ring-data-devices\") pod \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " Dec 05 17:50:38 crc kubenswrapper[4961]: I1205 17:50:38.515914 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2b26a944-dad9-45ea-b636-5f2ddaadc80d-scripts\") pod \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " Dec 05 17:50:38 crc kubenswrapper[4961]: I1205 17:50:38.516008 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2b26a944-dad9-45ea-b636-5f2ddaadc80d-etc-swift\") pod \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " Dec 05 17:50:38 crc kubenswrapper[4961]: I1205 17:50:38.516070 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2b26a944-dad9-45ea-b636-5f2ddaadc80d-dispersionconf\") pod \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\" (UID: \"2b26a944-dad9-45ea-b636-5f2ddaadc80d\") " Dec 05 17:50:38 crc kubenswrapper[4961]: I1205 17:50:38.516805 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2b26a944-dad9-45ea-b636-5f2ddaadc80d-etc-swift" (OuterVolumeSpecName: "etc-swift") pod "2b26a944-dad9-45ea-b636-5f2ddaadc80d" (UID: "2b26a944-dad9-45ea-b636-5f2ddaadc80d"). InnerVolumeSpecName "etc-swift". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:50:38 crc kubenswrapper[4961]: I1205 17:50:38.517166 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b26a944-dad9-45ea-b636-5f2ddaadc80d-ring-data-devices" (OuterVolumeSpecName: "ring-data-devices") pod "2b26a944-dad9-45ea-b636-5f2ddaadc80d" (UID: "2b26a944-dad9-45ea-b636-5f2ddaadc80d"). InnerVolumeSpecName "ring-data-devices". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:50:38 crc kubenswrapper[4961]: I1205 17:50:38.523429 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2b26a944-dad9-45ea-b636-5f2ddaadc80d-kube-api-access-2b5sp" (OuterVolumeSpecName: "kube-api-access-2b5sp") pod "2b26a944-dad9-45ea-b636-5f2ddaadc80d" (UID: "2b26a944-dad9-45ea-b636-5f2ddaadc80d"). InnerVolumeSpecName "kube-api-access-2b5sp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:50:38 crc kubenswrapper[4961]: I1205 17:50:38.525511 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b26a944-dad9-45ea-b636-5f2ddaadc80d-dispersionconf" (OuterVolumeSpecName: "dispersionconf") pod "2b26a944-dad9-45ea-b636-5f2ddaadc80d" (UID: "2b26a944-dad9-45ea-b636-5f2ddaadc80d"). InnerVolumeSpecName "dispersionconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:50:38 crc kubenswrapper[4961]: I1205 17:50:38.539163 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b26a944-dad9-45ea-b636-5f2ddaadc80d-swiftconf" (OuterVolumeSpecName: "swiftconf") pod "2b26a944-dad9-45ea-b636-5f2ddaadc80d" (UID: "2b26a944-dad9-45ea-b636-5f2ddaadc80d"). InnerVolumeSpecName "swiftconf". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:50:38 crc kubenswrapper[4961]: I1205 17:50:38.541607 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2b26a944-dad9-45ea-b636-5f2ddaadc80d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2b26a944-dad9-45ea-b636-5f2ddaadc80d" (UID: "2b26a944-dad9-45ea-b636-5f2ddaadc80d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:50:38 crc kubenswrapper[4961]: I1205 17:50:38.551901 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2b26a944-dad9-45ea-b636-5f2ddaadc80d-scripts" (OuterVolumeSpecName: "scripts") pod "2b26a944-dad9-45ea-b636-5f2ddaadc80d" (UID: "2b26a944-dad9-45ea-b636-5f2ddaadc80d"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:50:38 crc kubenswrapper[4961]: I1205 17:50:38.618647 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2b26a944-dad9-45ea-b636-5f2ddaadc80d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:38 crc kubenswrapper[4961]: I1205 17:50:38.618682 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2b5sp\" (UniqueName: \"kubernetes.io/projected/2b26a944-dad9-45ea-b636-5f2ddaadc80d-kube-api-access-2b5sp\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:38 crc kubenswrapper[4961]: I1205 17:50:38.618697 4961 reconciler_common.go:293] "Volume detached for volume \"swiftconf\" (UniqueName: \"kubernetes.io/secret/2b26a944-dad9-45ea-b636-5f2ddaadc80d-swiftconf\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:38 crc kubenswrapper[4961]: I1205 17:50:38.618708 4961 reconciler_common.go:293] "Volume detached for volume \"ring-data-devices\" (UniqueName: \"kubernetes.io/configmap/2b26a944-dad9-45ea-b636-5f2ddaadc80d-ring-data-devices\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:38 crc kubenswrapper[4961]: I1205 17:50:38.618728 4961 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/2b26a944-dad9-45ea-b636-5f2ddaadc80d-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:38 crc kubenswrapper[4961]: I1205 17:50:38.618738 4961 reconciler_common.go:293] "Volume detached for volume \"etc-swift\" (UniqueName: \"kubernetes.io/empty-dir/2b26a944-dad9-45ea-b636-5f2ddaadc80d-etc-swift\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:38 crc kubenswrapper[4961]: I1205 17:50:38.618747 4961 reconciler_common.go:293] "Volume detached for volume \"dispersionconf\" (UniqueName: \"kubernetes.io/secret/2b26a944-dad9-45ea-b636-5f2ddaadc80d-dispersionconf\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:39 crc kubenswrapper[4961]: I1205 17:50:39.051220 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-ring-rebalance-2mrxc" event={"ID":"2b26a944-dad9-45ea-b636-5f2ddaadc80d","Type":"ContainerDied","Data":"ac5b2a1116d127222de7a15421acd6e5d097984f73bc8b68b744f2a57d922200"} Dec 05 17:50:39 crc kubenswrapper[4961]: I1205 17:50:39.051289 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ac5b2a1116d127222de7a15421acd6e5d097984f73bc8b68b744f2a57d922200" Dec 05 17:50:39 crc kubenswrapper[4961]: I1205 17:50:39.051335 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-ring-rebalance-2mrxc" Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.241103 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.545943 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.772590 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-create-6txbs"] Dec 05 17:50:41 crc kubenswrapper[4961]: E1205 17:50:41.774671 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50c2c6e1-8c4d-4464-a362-3e99192aa795" containerName="init" Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.774695 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="50c2c6e1-8c4d-4464-a362-3e99192aa795" containerName="init" Dec 05 17:50:41 crc kubenswrapper[4961]: E1205 17:50:41.774723 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3aca988-775e-408c-80f8-61a5ace9d645" containerName="ovn-config" Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.774729 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3aca988-775e-408c-80f8-61a5ace9d645" containerName="ovn-config" Dec 05 17:50:41 crc kubenswrapper[4961]: E1205 17:50:41.774742 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2b26a944-dad9-45ea-b636-5f2ddaadc80d" containerName="swift-ring-rebalance" Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.774749 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="2b26a944-dad9-45ea-b636-5f2ddaadc80d" containerName="swift-ring-rebalance" Dec 05 17:50:41 crc kubenswrapper[4961]: E1205 17:50:41.774770 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="50c2c6e1-8c4d-4464-a362-3e99192aa795" containerName="dnsmasq-dns" Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.774791 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="50c2c6e1-8c4d-4464-a362-3e99192aa795" containerName="dnsmasq-dns" Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.776876 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="50c2c6e1-8c4d-4464-a362-3e99192aa795" containerName="dnsmasq-dns" Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.776929 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="2b26a944-dad9-45ea-b636-5f2ddaadc80d" containerName="swift-ring-rebalance" Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.776945 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3aca988-775e-408c-80f8-61a5ace9d645" containerName="ovn-config" Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.778592 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-create-6txbs" Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.801887 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-6txbs"] Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.882599 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtlwh\" (UniqueName: \"kubernetes.io/projected/5209837f-e8f0-4db4-ba7d-2af20947be50-kube-api-access-mtlwh\") pod \"cinder-db-create-6txbs\" (UID: \"5209837f-e8f0-4db4-ba7d-2af20947be50\") " pod="openstack/cinder-db-create-6txbs" Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.883132 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5209837f-e8f0-4db4-ba7d-2af20947be50-operator-scripts\") pod \"cinder-db-create-6txbs\" (UID: \"5209837f-e8f0-4db4-ba7d-2af20947be50\") " pod="openstack/cinder-db-create-6txbs" Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.886444 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-create-wwxhc"] Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.890307 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-wwxhc" Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.896890 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-wwxhc"] Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.908112 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-bf35-account-create-update-5fmjn"] Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.909229 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-bf35-account-create-update-5fmjn" Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.911704 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-db-secret" Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.913369 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-bf35-account-create-update-5fmjn"] Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.981620 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-0b2d-account-create-update-89tcd"] Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.982866 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-0b2d-account-create-update-89tcd" Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.984544 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtlwh\" (UniqueName: \"kubernetes.io/projected/5209837f-e8f0-4db4-ba7d-2af20947be50-kube-api-access-mtlwh\") pod \"cinder-db-create-6txbs\" (UID: \"5209837f-e8f0-4db4-ba7d-2af20947be50\") " pod="openstack/cinder-db-create-6txbs" Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.984663 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b8kb6\" (UniqueName: \"kubernetes.io/projected/1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82-kube-api-access-b8kb6\") pod \"barbican-db-create-wwxhc\" (UID: \"1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82\") " pod="openstack/barbican-db-create-wwxhc" Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.984691 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2673794d-8d1a-4b28-9988-65c916ae70b0-operator-scripts\") pod \"cinder-bf35-account-create-update-5fmjn\" (UID: \"2673794d-8d1a-4b28-9988-65c916ae70b0\") " pod="openstack/cinder-bf35-account-create-update-5fmjn" Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.984789 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rtjvw\" (UniqueName: \"kubernetes.io/projected/2673794d-8d1a-4b28-9988-65c916ae70b0-kube-api-access-rtjvw\") pod \"cinder-bf35-account-create-update-5fmjn\" (UID: \"2673794d-8d1a-4b28-9988-65c916ae70b0\") " pod="openstack/cinder-bf35-account-create-update-5fmjn" Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.984910 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82-operator-scripts\") pod \"barbican-db-create-wwxhc\" (UID: \"1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82\") " pod="openstack/barbican-db-create-wwxhc" Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.984961 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5209837f-e8f0-4db4-ba7d-2af20947be50-operator-scripts\") pod \"cinder-db-create-6txbs\" (UID: \"5209837f-e8f0-4db4-ba7d-2af20947be50\") " pod="openstack/cinder-db-create-6txbs" Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.986953 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5209837f-e8f0-4db4-ba7d-2af20947be50-operator-scripts\") pod \"cinder-db-create-6txbs\" (UID: \"5209837f-e8f0-4db4-ba7d-2af20947be50\") " pod="openstack/cinder-db-create-6txbs" Dec 05 17:50:41 crc kubenswrapper[4961]: I1205 17:50:41.993978 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-db-secret" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.012656 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-0b2d-account-create-update-89tcd"] Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.021459 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtlwh\" (UniqueName: \"kubernetes.io/projected/5209837f-e8f0-4db4-ba7d-2af20947be50-kube-api-access-mtlwh\") pod 
\"cinder-db-create-6txbs\" (UID: \"5209837f-e8f0-4db4-ba7d-2af20947be50\") " pod="openstack/cinder-db-create-6txbs" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.031809 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-db-sync-dmt4c"] Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.033855 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-dmt4c" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.048616 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.048770 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.048896 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.049080 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-pp6pr" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.054246 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-dmt4c"] Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.086697 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bck7p\" (UniqueName: \"kubernetes.io/projected/ea9126a1-ef8a-42ae-a94c-596b287fa74d-kube-api-access-bck7p\") pod \"barbican-0b2d-account-create-update-89tcd\" (UID: \"ea9126a1-ef8a-42ae-a94c-596b287fa74d\") " pod="openstack/barbican-0b2d-account-create-update-89tcd" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.086820 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82-operator-scripts\") pod \"barbican-db-create-wwxhc\" (UID: \"1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82\") " pod="openstack/barbican-db-create-wwxhc" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.086880 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea9126a1-ef8a-42ae-a94c-596b287fa74d-operator-scripts\") pod \"barbican-0b2d-account-create-update-89tcd\" (UID: \"ea9126a1-ef8a-42ae-a94c-596b287fa74d\") " pod="openstack/barbican-0b2d-account-create-update-89tcd" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.086919 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b8kb6\" (UniqueName: \"kubernetes.io/projected/1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82-kube-api-access-b8kb6\") pod \"barbican-db-create-wwxhc\" (UID: \"1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82\") " pod="openstack/barbican-db-create-wwxhc" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.086936 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2673794d-8d1a-4b28-9988-65c916ae70b0-operator-scripts\") pod \"cinder-bf35-account-create-update-5fmjn\" (UID: \"2673794d-8d1a-4b28-9988-65c916ae70b0\") " pod="openstack/cinder-bf35-account-create-update-5fmjn" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.086973 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rtjvw\" (UniqueName: 
\"kubernetes.io/projected/2673794d-8d1a-4b28-9988-65c916ae70b0-kube-api-access-rtjvw\") pod \"cinder-bf35-account-create-update-5fmjn\" (UID: \"2673794d-8d1a-4b28-9988-65c916ae70b0\") " pod="openstack/cinder-bf35-account-create-update-5fmjn" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.087838 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82-operator-scripts\") pod \"barbican-db-create-wwxhc\" (UID: \"1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82\") " pod="openstack/barbican-db-create-wwxhc" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.088513 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2673794d-8d1a-4b28-9988-65c916ae70b0-operator-scripts\") pod \"cinder-bf35-account-create-update-5fmjn\" (UID: \"2673794d-8d1a-4b28-9988-65c916ae70b0\") " pod="openstack/cinder-bf35-account-create-update-5fmjn" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.105417 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rtjvw\" (UniqueName: \"kubernetes.io/projected/2673794d-8d1a-4b28-9988-65c916ae70b0-kube-api-access-rtjvw\") pod \"cinder-bf35-account-create-update-5fmjn\" (UID: \"2673794d-8d1a-4b28-9988-65c916ae70b0\") " pod="openstack/cinder-bf35-account-create-update-5fmjn" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.114063 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b8kb6\" (UniqueName: \"kubernetes.io/projected/1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82-kube-api-access-b8kb6\") pod \"barbican-db-create-wwxhc\" (UID: \"1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82\") " pod="openstack/barbican-db-create-wwxhc" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.146212 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-6txbs" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.160376 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-create-ssvs5"] Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.161547 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-ssvs5" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.177370 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-ssvs5"] Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.188212 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea9126a1-ef8a-42ae-a94c-596b287fa74d-operator-scripts\") pod \"barbican-0b2d-account-create-update-89tcd\" (UID: \"ea9126a1-ef8a-42ae-a94c-596b287fa74d\") " pod="openstack/barbican-0b2d-account-create-update-89tcd" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.188264 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eaedc2cc-4238-4831-ad1a-1c260e8bec24-config-data\") pod \"keystone-db-sync-dmt4c\" (UID: \"eaedc2cc-4238-4831-ad1a-1c260e8bec24\") " pod="openstack/keystone-db-sync-dmt4c" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.188296 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eaedc2cc-4238-4831-ad1a-1c260e8bec24-combined-ca-bundle\") pod \"keystone-db-sync-dmt4c\" (UID: \"eaedc2cc-4238-4831-ad1a-1c260e8bec24\") " pod="openstack/keystone-db-sync-dmt4c" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.188323 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kjxn2\" (UniqueName: \"kubernetes.io/projected/eaedc2cc-4238-4831-ad1a-1c260e8bec24-kube-api-access-kjxn2\") pod \"keystone-db-sync-dmt4c\" (UID: \"eaedc2cc-4238-4831-ad1a-1c260e8bec24\") " pod="openstack/keystone-db-sync-dmt4c" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.188349 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bck7p\" (UniqueName: \"kubernetes.io/projected/ea9126a1-ef8a-42ae-a94c-596b287fa74d-kube-api-access-bck7p\") pod \"barbican-0b2d-account-create-update-89tcd\" (UID: \"ea9126a1-ef8a-42ae-a94c-596b287fa74d\") " pod="openstack/barbican-0b2d-account-create-update-89tcd" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.189439 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea9126a1-ef8a-42ae-a94c-596b287fa74d-operator-scripts\") pod \"barbican-0b2d-account-create-update-89tcd\" (UID: \"ea9126a1-ef8a-42ae-a94c-596b287fa74d\") " pod="openstack/barbican-0b2d-account-create-update-89tcd" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.208348 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bck7p\" (UniqueName: \"kubernetes.io/projected/ea9126a1-ef8a-42ae-a94c-596b287fa74d-kube-api-access-bck7p\") pod \"barbican-0b2d-account-create-update-89tcd\" (UID: \"ea9126a1-ef8a-42ae-a94c-596b287fa74d\") " pod="openstack/barbican-0b2d-account-create-update-89tcd" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.209967 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-wwxhc" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.229138 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-bf35-account-create-update-5fmjn" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.254307 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-07f9-account-create-update-jcqf5"] Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.261422 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-07f9-account-create-update-jcqf5" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.268228 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-07f9-account-create-update-jcqf5"] Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.270542 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-db-secret" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.291592 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9aee3a36-f57d-4c81-9627-df1f4ea436be-operator-scripts\") pod \"neutron-db-create-ssvs5\" (UID: \"9aee3a36-f57d-4c81-9627-df1f4ea436be\") " pod="openstack/neutron-db-create-ssvs5" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.291651 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zxzdv\" (UniqueName: \"kubernetes.io/projected/9aee3a36-f57d-4c81-9627-df1f4ea436be-kube-api-access-zxzdv\") pod \"neutron-db-create-ssvs5\" (UID: \"9aee3a36-f57d-4c81-9627-df1f4ea436be\") " pod="openstack/neutron-db-create-ssvs5" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.291697 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eaedc2cc-4238-4831-ad1a-1c260e8bec24-config-data\") pod \"keystone-db-sync-dmt4c\" (UID: \"eaedc2cc-4238-4831-ad1a-1c260e8bec24\") " pod="openstack/keystone-db-sync-dmt4c" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.291738 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eaedc2cc-4238-4831-ad1a-1c260e8bec24-combined-ca-bundle\") pod \"keystone-db-sync-dmt4c\" (UID: \"eaedc2cc-4238-4831-ad1a-1c260e8bec24\") " pod="openstack/keystone-db-sync-dmt4c" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.291833 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kjxn2\" (UniqueName: \"kubernetes.io/projected/eaedc2cc-4238-4831-ad1a-1c260e8bec24-kube-api-access-kjxn2\") pod \"keystone-db-sync-dmt4c\" (UID: \"eaedc2cc-4238-4831-ad1a-1c260e8bec24\") " pod="openstack/keystone-db-sync-dmt4c" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.297946 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eaedc2cc-4238-4831-ad1a-1c260e8bec24-combined-ca-bundle\") pod \"keystone-db-sync-dmt4c\" (UID: \"eaedc2cc-4238-4831-ad1a-1c260e8bec24\") " pod="openstack/keystone-db-sync-dmt4c" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.300460 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eaedc2cc-4238-4831-ad1a-1c260e8bec24-config-data\") pod \"keystone-db-sync-dmt4c\" (UID: \"eaedc2cc-4238-4831-ad1a-1c260e8bec24\") " pod="openstack/keystone-db-sync-dmt4c" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.307255 4961 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-0b2d-account-create-update-89tcd" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.315172 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kjxn2\" (UniqueName: \"kubernetes.io/projected/eaedc2cc-4238-4831-ad1a-1c260e8bec24-kube-api-access-kjxn2\") pod \"keystone-db-sync-dmt4c\" (UID: \"eaedc2cc-4238-4831-ad1a-1c260e8bec24\") " pod="openstack/keystone-db-sync-dmt4c" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.380443 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-dmt4c" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.397894 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/919d9712-f294-46aa-95e9-b1c166d3bf5a-operator-scripts\") pod \"neutron-07f9-account-create-update-jcqf5\" (UID: \"919d9712-f294-46aa-95e9-b1c166d3bf5a\") " pod="openstack/neutron-07f9-account-create-update-jcqf5" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.398265 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9aee3a36-f57d-4c81-9627-df1f4ea436be-operator-scripts\") pod \"neutron-db-create-ssvs5\" (UID: \"9aee3a36-f57d-4c81-9627-df1f4ea436be\") " pod="openstack/neutron-db-create-ssvs5" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.398290 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zxzdv\" (UniqueName: \"kubernetes.io/projected/9aee3a36-f57d-4c81-9627-df1f4ea436be-kube-api-access-zxzdv\") pod \"neutron-db-create-ssvs5\" (UID: \"9aee3a36-f57d-4c81-9627-df1f4ea436be\") " pod="openstack/neutron-db-create-ssvs5" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.398344 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h8x5q\" (UniqueName: \"kubernetes.io/projected/919d9712-f294-46aa-95e9-b1c166d3bf5a-kube-api-access-h8x5q\") pod \"neutron-07f9-account-create-update-jcqf5\" (UID: \"919d9712-f294-46aa-95e9-b1c166d3bf5a\") " pod="openstack/neutron-07f9-account-create-update-jcqf5" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.399330 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9aee3a36-f57d-4c81-9627-df1f4ea436be-operator-scripts\") pod \"neutron-db-create-ssvs5\" (UID: \"9aee3a36-f57d-4c81-9627-df1f4ea436be\") " pod="openstack/neutron-db-create-ssvs5" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.417434 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zxzdv\" (UniqueName: \"kubernetes.io/projected/9aee3a36-f57d-4c81-9627-df1f4ea436be-kube-api-access-zxzdv\") pod \"neutron-db-create-ssvs5\" (UID: \"9aee3a36-f57d-4c81-9627-df1f4ea436be\") " pod="openstack/neutron-db-create-ssvs5" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.491244 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-db-create-ssvs5" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.499646 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/919d9712-f294-46aa-95e9-b1c166d3bf5a-operator-scripts\") pod \"neutron-07f9-account-create-update-jcqf5\" (UID: \"919d9712-f294-46aa-95e9-b1c166d3bf5a\") " pod="openstack/neutron-07f9-account-create-update-jcqf5" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.499722 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h8x5q\" (UniqueName: \"kubernetes.io/projected/919d9712-f294-46aa-95e9-b1c166d3bf5a-kube-api-access-h8x5q\") pod \"neutron-07f9-account-create-update-jcqf5\" (UID: \"919d9712-f294-46aa-95e9-b1c166d3bf5a\") " pod="openstack/neutron-07f9-account-create-update-jcqf5" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.500841 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/919d9712-f294-46aa-95e9-b1c166d3bf5a-operator-scripts\") pod \"neutron-07f9-account-create-update-jcqf5\" (UID: \"919d9712-f294-46aa-95e9-b1c166d3bf5a\") " pod="openstack/neutron-07f9-account-create-update-jcqf5" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.520615 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h8x5q\" (UniqueName: \"kubernetes.io/projected/919d9712-f294-46aa-95e9-b1c166d3bf5a-kube-api-access-h8x5q\") pod \"neutron-07f9-account-create-update-jcqf5\" (UID: \"919d9712-f294-46aa-95e9-b1c166d3bf5a\") " pod="openstack/neutron-07f9-account-create-update-jcqf5" Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.622144 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-07f9-account-create-update-jcqf5"
Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.809551 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-create-wwxhc"]
Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.830445 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-create-6txbs"]
Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.844366 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-0b2d-account-create-update-89tcd"]
Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.888616 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-db-sync-dmt4c"]
Dec 05 17:50:42 crc kubenswrapper[4961]: I1205 17:50:42.899515 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-bf35-account-create-update-5fmjn"]
Dec 05 17:50:43 crc kubenswrapper[4961]: I1205 17:50:43.033205 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-07f9-account-create-update-jcqf5"]
Dec 05 17:50:43 crc kubenswrapper[4961]: W1205 17:50:43.033231 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod919d9712_f294_46aa_95e9_b1c166d3bf5a.slice/crio-e107bd53731bec57bb34975c16705da14b78c0541774589cf313551b40dc3a97 WatchSource:0}: Error finding container e107bd53731bec57bb34975c16705da14b78c0541774589cf313551b40dc3a97: Status 404 returned error can't find the container with id e107bd53731bec57bb34975c16705da14b78c0541774589cf313551b40dc3a97
Dec 05 17:50:43 crc kubenswrapper[4961]: I1205 17:50:43.087115 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-0b2d-account-create-update-89tcd" event={"ID":"ea9126a1-ef8a-42ae-a94c-596b287fa74d","Type":"ContainerStarted","Data":"e6629383cc1372abc39c16592e82152d19a71696b95b1c15f7bedcf90e7c4cd8"}
Dec 05 17:50:43 crc kubenswrapper[4961]: I1205 17:50:43.087985 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-07f9-account-create-update-jcqf5" event={"ID":"919d9712-f294-46aa-95e9-b1c166d3bf5a","Type":"ContainerStarted","Data":"e107bd53731bec57bb34975c16705da14b78c0541774589cf313551b40dc3a97"}
Dec 05 17:50:43 crc kubenswrapper[4961]: I1205 17:50:43.092610 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-wwxhc" event={"ID":"1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82","Type":"ContainerStarted","Data":"c7775548c61a86060018c3fa198cfb2dd6a5afa042f10c3609bc3d1fe578f90c"}
Dec 05 17:50:43 crc kubenswrapper[4961]: I1205 17:50:43.092657 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-wwxhc" event={"ID":"1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82","Type":"ContainerStarted","Data":"539219c61c65ed33d5c805c814ce2016722c8886aef5d40b984ef5be5ac8b685"}
Dec 05 17:50:43 crc kubenswrapper[4961]: I1205 17:50:43.096986 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-dmt4c" event={"ID":"eaedc2cc-4238-4831-ad1a-1c260e8bec24","Type":"ContainerStarted","Data":"c50f847bfa000b30de1044d871bfefaa7d7d38a86aa73f6574b9ec4c9bbef78e"}
Dec 05 17:50:43 crc kubenswrapper[4961]: I1205 17:50:43.102175 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-bf35-account-create-update-5fmjn" event={"ID":"2673794d-8d1a-4b28-9988-65c916ae70b0","Type":"ContainerStarted","Data":"2d4408694cfe7133bbfd49bcbbf3218e9a19669fa572bb41cdf50216aa2d8cc8"}
Dec 05 17:50:43 crc kubenswrapper[4961]: I1205 17:50:43.104008 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-6txbs" event={"ID":"5209837f-e8f0-4db4-ba7d-2af20947be50","Type":"ContainerStarted","Data":"ce1d1257db7bf4af890bcf579537afadcddcd5e309fa270bf80f9b5904b3d593"}
Dec 05 17:50:43 crc kubenswrapper[4961]: I1205 17:50:43.104045 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-6txbs" event={"ID":"5209837f-e8f0-4db4-ba7d-2af20947be50","Type":"ContainerStarted","Data":"2009e2e95079eca29831f8eb82297002d5a3f4d9c69b3a52e43a5e5c7be9c339"}
Dec 05 17:50:43 crc kubenswrapper[4961]: I1205 17:50:43.108885 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-create-wwxhc" podStartSLOduration=2.108868297 podStartE2EDuration="2.108868297s" podCreationTimestamp="2025-12-05 17:50:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:50:43.107447342 +0000 UTC m=+1049.168597815" watchObservedRunningTime="2025-12-05 17:50:43.108868297 +0000 UTC m=+1049.170018770"
Dec 05 17:50:43 crc kubenswrapper[4961]: I1205 17:50:43.148523 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-create-6txbs" podStartSLOduration=2.148504161 podStartE2EDuration="2.148504161s" podCreationTimestamp="2025-12-05 17:50:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:50:43.121170789 +0000 UTC m=+1049.182321262" watchObservedRunningTime="2025-12-05 17:50:43.148504161 +0000 UTC m=+1049.209654634"
Dec 05 17:50:43 crc kubenswrapper[4961]: I1205 17:50:43.172645 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-create-ssvs5"]
Dec 05 17:50:44 crc kubenswrapper[4961]: I1205 17:50:44.115545 4961 generic.go:334] "Generic (PLEG): container finished" podID="2673794d-8d1a-4b28-9988-65c916ae70b0" containerID="1e81862d6f1a3abea7d143ce5db4b95b941b0aab8e9996f7be4a3c30bc693ed7" exitCode=0
Dec 05 17:50:44 crc kubenswrapper[4961]: I1205 17:50:44.116083 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-bf35-account-create-update-5fmjn" event={"ID":"2673794d-8d1a-4b28-9988-65c916ae70b0","Type":"ContainerDied","Data":"1e81862d6f1a3abea7d143ce5db4b95b941b0aab8e9996f7be4a3c30bc693ed7"}
Dec 05 17:50:44 crc kubenswrapper[4961]: I1205 17:50:44.120391 4961 generic.go:334] "Generic (PLEG): container finished" podID="5209837f-e8f0-4db4-ba7d-2af20947be50" containerID="ce1d1257db7bf4af890bcf579537afadcddcd5e309fa270bf80f9b5904b3d593" exitCode=0
Dec 05 17:50:44 crc kubenswrapper[4961]: I1205 17:50:44.120436 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-6txbs" event={"ID":"5209837f-e8f0-4db4-ba7d-2af20947be50","Type":"ContainerDied","Data":"ce1d1257db7bf4af890bcf579537afadcddcd5e309fa270bf80f9b5904b3d593"}
Dec 05 17:50:44 crc kubenswrapper[4961]: I1205 17:50:44.122637 4961 generic.go:334] "Generic (PLEG): container finished" podID="9aee3a36-f57d-4c81-9627-df1f4ea436be" containerID="392c07a98fdc0c8fc13fd55b762070b990a1ec7f43be6109b96e28a26d14172d" exitCode=0
Dec 05 17:50:44 crc kubenswrapper[4961]: I1205 17:50:44.122696 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-ssvs5" event={"ID":"9aee3a36-f57d-4c81-9627-df1f4ea436be","Type":"ContainerDied","Data":"392c07a98fdc0c8fc13fd55b762070b990a1ec7f43be6109b96e28a26d14172d"}
Dec 05 17:50:44 crc kubenswrapper[4961]: I1205 17:50:44.122721 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-ssvs5" event={"ID":"9aee3a36-f57d-4c81-9627-df1f4ea436be","Type":"ContainerStarted","Data":"eae4838456cce7d6da19ff56867f196a2816cb555531ff1ba118676fe0c8e0cf"}
Dec 05 17:50:44 crc kubenswrapper[4961]: I1205 17:50:44.125168 4961 generic.go:334] "Generic (PLEG): container finished" podID="ea9126a1-ef8a-42ae-a94c-596b287fa74d" containerID="266fab2cad3e0fba693e0eed0aa20a322bbe1c902e28ddd8d9d5bfcd17ebc458" exitCode=0
Dec 05 17:50:44 crc kubenswrapper[4961]: I1205 17:50:44.125332 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-0b2d-account-create-update-89tcd" event={"ID":"ea9126a1-ef8a-42ae-a94c-596b287fa74d","Type":"ContainerDied","Data":"266fab2cad3e0fba693e0eed0aa20a322bbe1c902e28ddd8d9d5bfcd17ebc458"}
Dec 05 17:50:44 crc kubenswrapper[4961]: I1205 17:50:44.129319 4961 generic.go:334] "Generic (PLEG): container finished" podID="919d9712-f294-46aa-95e9-b1c166d3bf5a" containerID="b3ef8b94d3f80ab569bdc0cfc4a932c06ec7a3c41710f129d9a36eb373829953" exitCode=0
Dec 05 17:50:44 crc kubenswrapper[4961]: I1205 17:50:44.129403 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-07f9-account-create-update-jcqf5" event={"ID":"919d9712-f294-46aa-95e9-b1c166d3bf5a","Type":"ContainerDied","Data":"b3ef8b94d3f80ab569bdc0cfc4a932c06ec7a3c41710f129d9a36eb373829953"}
Dec 05 17:50:44 crc kubenswrapper[4961]: I1205 17:50:44.131558 4961 generic.go:334] "Generic (PLEG): container finished" podID="1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82" containerID="c7775548c61a86060018c3fa198cfb2dd6a5afa042f10c3609bc3d1fe578f90c" exitCode=0
Dec 05 17:50:44 crc kubenswrapper[4961]: I1205 17:50:44.131597 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-wwxhc" event={"ID":"1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82","Type":"ContainerDied","Data":"c7775548c61a86060018c3fa198cfb2dd6a5afa042f10c3609bc3d1fe578f90c"}
Dec 05 17:50:46 crc kubenswrapper[4961]: I1205 17:50:46.152878 4961 generic.go:334] "Generic (PLEG): container finished" podID="0ec59b22-f0c6-4de1-a447-59eb40a7c89d" containerID="77b63205c0b9058f2486caf506c53645a7e591ccfeadb412cdd2905f65c6dcd5" exitCode=0
Dec 05 17:50:46 crc kubenswrapper[4961]: I1205 17:50:46.152963 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-ghqpp" event={"ID":"0ec59b22-f0c6-4de1-a447-59eb40a7c89d","Type":"ContainerDied","Data":"77b63205c0b9058f2486caf506c53645a7e591ccfeadb412cdd2905f65c6dcd5"}
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.672597 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-07f9-account-create-update-jcqf5"
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.678995 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-ghqpp"
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.687033 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-wwxhc"
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.764856 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-bf35-account-create-update-5fmjn"
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.770495 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-ssvs5"
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.787406 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-0b2d-account-create-update-89tcd"
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.794117 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/919d9712-f294-46aa-95e9-b1c166d3bf5a-operator-scripts\") pod \"919d9712-f294-46aa-95e9-b1c166d3bf5a\" (UID: \"919d9712-f294-46aa-95e9-b1c166d3bf5a\") "
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.794474 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82-operator-scripts\") pod \"1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82\" (UID: \"1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82\") "
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.794624 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cnll2\" (UniqueName: \"kubernetes.io/projected/0ec59b22-f0c6-4de1-a447-59eb40a7c89d-kube-api-access-cnll2\") pod \"0ec59b22-f0c6-4de1-a447-59eb40a7c89d\" (UID: \"0ec59b22-f0c6-4de1-a447-59eb40a7c89d\") "
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.794524 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/919d9712-f294-46aa-95e9-b1c166d3bf5a-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "919d9712-f294-46aa-95e9-b1c166d3bf5a" (UID: "919d9712-f294-46aa-95e9-b1c166d3bf5a"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.794834 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82" (UID: "1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.794851 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b8kb6\" (UniqueName: \"kubernetes.io/projected/1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82-kube-api-access-b8kb6\") pod \"1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82\" (UID: \"1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82\") "
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.795187 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0ec59b22-f0c6-4de1-a447-59eb40a7c89d-db-sync-config-data\") pod \"0ec59b22-f0c6-4de1-a447-59eb40a7c89d\" (UID: \"0ec59b22-f0c6-4de1-a447-59eb40a7c89d\") "
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.795293 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ec59b22-f0c6-4de1-a447-59eb40a7c89d-config-data\") pod \"0ec59b22-f0c6-4de1-a447-59eb40a7c89d\" (UID: \"0ec59b22-f0c6-4de1-a447-59eb40a7c89d\") "
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.795436 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ec59b22-f0c6-4de1-a447-59eb40a7c89d-combined-ca-bundle\") pod \"0ec59b22-f0c6-4de1-a447-59eb40a7c89d\" (UID: \"0ec59b22-f0c6-4de1-a447-59eb40a7c89d\") "
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.795563 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h8x5q\" (UniqueName: \"kubernetes.io/projected/919d9712-f294-46aa-95e9-b1c166d3bf5a-kube-api-access-h8x5q\") pod \"919d9712-f294-46aa-95e9-b1c166d3bf5a\" (UID: \"919d9712-f294-46aa-95e9-b1c166d3bf5a\") "
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.796240 4961 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/919d9712-f294-46aa-95e9-b1c166d3bf5a-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.796365 4961 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.800645 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-6txbs"
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.803293 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ec59b22-f0c6-4de1-a447-59eb40a7c89d-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "0ec59b22-f0c6-4de1-a447-59eb40a7c89d" (UID: "0ec59b22-f0c6-4de1-a447-59eb40a7c89d"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.804548 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0ec59b22-f0c6-4de1-a447-59eb40a7c89d-kube-api-access-cnll2" (OuterVolumeSpecName: "kube-api-access-cnll2") pod "0ec59b22-f0c6-4de1-a447-59eb40a7c89d" (UID: "0ec59b22-f0c6-4de1-a447-59eb40a7c89d"). InnerVolumeSpecName "kube-api-access-cnll2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.810380 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/919d9712-f294-46aa-95e9-b1c166d3bf5a-kube-api-access-h8x5q" (OuterVolumeSpecName: "kube-api-access-h8x5q") pod "919d9712-f294-46aa-95e9-b1c166d3bf5a" (UID: "919d9712-f294-46aa-95e9-b1c166d3bf5a"). InnerVolumeSpecName "kube-api-access-h8x5q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.823552 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82-kube-api-access-b8kb6" (OuterVolumeSpecName: "kube-api-access-b8kb6") pod "1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82" (UID: "1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82"). InnerVolumeSpecName "kube-api-access-b8kb6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.847921 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ec59b22-f0c6-4de1-a447-59eb40a7c89d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0ec59b22-f0c6-4de1-a447-59eb40a7c89d" (UID: "0ec59b22-f0c6-4de1-a447-59eb40a7c89d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.885919 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0ec59b22-f0c6-4de1-a447-59eb40a7c89d-config-data" (OuterVolumeSpecName: "config-data") pod "0ec59b22-f0c6-4de1-a447-59eb40a7c89d" (UID: "0ec59b22-f0c6-4de1-a447-59eb40a7c89d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.897354 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bck7p\" (UniqueName: \"kubernetes.io/projected/ea9126a1-ef8a-42ae-a94c-596b287fa74d-kube-api-access-bck7p\") pod \"ea9126a1-ef8a-42ae-a94c-596b287fa74d\" (UID: \"ea9126a1-ef8a-42ae-a94c-596b287fa74d\") "
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.897407 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rtjvw\" (UniqueName: \"kubernetes.io/projected/2673794d-8d1a-4b28-9988-65c916ae70b0-kube-api-access-rtjvw\") pod \"2673794d-8d1a-4b28-9988-65c916ae70b0\" (UID: \"2673794d-8d1a-4b28-9988-65c916ae70b0\") "
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.897432 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5209837f-e8f0-4db4-ba7d-2af20947be50-operator-scripts\") pod \"5209837f-e8f0-4db4-ba7d-2af20947be50\" (UID: \"5209837f-e8f0-4db4-ba7d-2af20947be50\") "
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.897484 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9aee3a36-f57d-4c81-9627-df1f4ea436be-operator-scripts\") pod \"9aee3a36-f57d-4c81-9627-df1f4ea436be\" (UID: \"9aee3a36-f57d-4c81-9627-df1f4ea436be\") "
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.897567 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mtlwh\" (UniqueName: \"kubernetes.io/projected/5209837f-e8f0-4db4-ba7d-2af20947be50-kube-api-access-mtlwh\") pod \"5209837f-e8f0-4db4-ba7d-2af20947be50\" (UID: \"5209837f-e8f0-4db4-ba7d-2af20947be50\") "
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.897593 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea9126a1-ef8a-42ae-a94c-596b287fa74d-operator-scripts\") pod \"ea9126a1-ef8a-42ae-a94c-596b287fa74d\" (UID: \"ea9126a1-ef8a-42ae-a94c-596b287fa74d\") "
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.897621 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2673794d-8d1a-4b28-9988-65c916ae70b0-operator-scripts\") pod \"2673794d-8d1a-4b28-9988-65c916ae70b0\" (UID: \"2673794d-8d1a-4b28-9988-65c916ae70b0\") "
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.897681 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zxzdv\" (UniqueName: \"kubernetes.io/projected/9aee3a36-f57d-4c81-9627-df1f4ea436be-kube-api-access-zxzdv\") pod \"9aee3a36-f57d-4c81-9627-df1f4ea436be\" (UID: \"9aee3a36-f57d-4c81-9627-df1f4ea436be\") "
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.898591 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2673794d-8d1a-4b28-9988-65c916ae70b0-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2673794d-8d1a-4b28-9988-65c916ae70b0" (UID: "2673794d-8d1a-4b28-9988-65c916ae70b0"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.898604 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea9126a1-ef8a-42ae-a94c-596b287fa74d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ea9126a1-ef8a-42ae-a94c-596b287fa74d" (UID: "ea9126a1-ef8a-42ae-a94c-596b287fa74d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.898600 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5209837f-e8f0-4db4-ba7d-2af20947be50-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5209837f-e8f0-4db4-ba7d-2af20947be50" (UID: "5209837f-e8f0-4db4-ba7d-2af20947be50"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.898764 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9aee3a36-f57d-4c81-9627-df1f4ea436be-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9aee3a36-f57d-4c81-9627-df1f4ea436be" (UID: "9aee3a36-f57d-4c81-9627-df1f4ea436be"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.898757 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0ec59b22-f0c6-4de1-a447-59eb40a7c89d-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.898978 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0ec59b22-f0c6-4de1-a447-59eb40a7c89d-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.899060 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h8x5q\" (UniqueName: \"kubernetes.io/projected/919d9712-f294-46aa-95e9-b1c166d3bf5a-kube-api-access-h8x5q\") on node \"crc\" DevicePath \"\""
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.899179 4961 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea9126a1-ef8a-42ae-a94c-596b287fa74d-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.899250 4961 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2673794d-8d1a-4b28-9988-65c916ae70b0-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.899310 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cnll2\" (UniqueName: \"kubernetes.io/projected/0ec59b22-f0c6-4de1-a447-59eb40a7c89d-kube-api-access-cnll2\") on node \"crc\" DevicePath \"\""
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.899372 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b8kb6\" (UniqueName: \"kubernetes.io/projected/1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82-kube-api-access-b8kb6\") on node \"crc\" DevicePath \"\""
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.899436 4961 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/0ec59b22-f0c6-4de1-a447-59eb40a7c89d-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.900671 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2673794d-8d1a-4b28-9988-65c916ae70b0-kube-api-access-rtjvw" (OuterVolumeSpecName: "kube-api-access-rtjvw") pod "2673794d-8d1a-4b28-9988-65c916ae70b0" (UID: "2673794d-8d1a-4b28-9988-65c916ae70b0"). InnerVolumeSpecName "kube-api-access-rtjvw". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.901205 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5209837f-e8f0-4db4-ba7d-2af20947be50-kube-api-access-mtlwh" (OuterVolumeSpecName: "kube-api-access-mtlwh") pod "5209837f-e8f0-4db4-ba7d-2af20947be50" (UID: "5209837f-e8f0-4db4-ba7d-2af20947be50"). InnerVolumeSpecName "kube-api-access-mtlwh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.902120 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9aee3a36-f57d-4c81-9627-df1f4ea436be-kube-api-access-zxzdv" (OuterVolumeSpecName: "kube-api-access-zxzdv") pod "9aee3a36-f57d-4c81-9627-df1f4ea436be" (UID: "9aee3a36-f57d-4c81-9627-df1f4ea436be"). InnerVolumeSpecName "kube-api-access-zxzdv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:50:47 crc kubenswrapper[4961]: I1205 17:50:47.902714 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea9126a1-ef8a-42ae-a94c-596b287fa74d-kube-api-access-bck7p" (OuterVolumeSpecName: "kube-api-access-bck7p") pod "ea9126a1-ef8a-42ae-a94c-596b287fa74d" (UID: "ea9126a1-ef8a-42ae-a94c-596b287fa74d"). InnerVolumeSpecName "kube-api-access-bck7p". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.001604 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rtjvw\" (UniqueName: \"kubernetes.io/projected/2673794d-8d1a-4b28-9988-65c916ae70b0-kube-api-access-rtjvw\") on node \"crc\" DevicePath \"\""
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.001642 4961 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5209837f-e8f0-4db4-ba7d-2af20947be50-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.001652 4961 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9aee3a36-f57d-4c81-9627-df1f4ea436be-operator-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.001661 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mtlwh\" (UniqueName: \"kubernetes.io/projected/5209837f-e8f0-4db4-ba7d-2af20947be50-kube-api-access-mtlwh\") on node \"crc\" DevicePath \"\""
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.001671 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zxzdv\" (UniqueName: \"kubernetes.io/projected/9aee3a36-f57d-4c81-9627-df1f4ea436be-kube-api-access-zxzdv\") on node \"crc\" DevicePath \"\""
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.001681 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bck7p\" (UniqueName: \"kubernetes.io/projected/ea9126a1-ef8a-42ae-a94c-596b287fa74d-kube-api-access-bck7p\") on node \"crc\" DevicePath \"\""
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.169148 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-create-wwxhc" event={"ID":"1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82","Type":"ContainerDied","Data":"539219c61c65ed33d5c805c814ce2016722c8886aef5d40b984ef5be5ac8b685"}
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.169190 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="539219c61c65ed33d5c805c814ce2016722c8886aef5d40b984ef5be5ac8b685"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.169257 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-create-wwxhc"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.176279 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-dmt4c" event={"ID":"eaedc2cc-4238-4831-ad1a-1c260e8bec24","Type":"ContainerStarted","Data":"da9a97b4359f1460049f4a5cc7ea0b44b1acfd3a1a9492323cae3d97df6ea6ea"}
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.185458 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-bf35-account-create-update-5fmjn" event={"ID":"2673794d-8d1a-4b28-9988-65c916ae70b0","Type":"ContainerDied","Data":"2d4408694cfe7133bbfd49bcbbf3218e9a19669fa572bb41cdf50216aa2d8cc8"}
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.185516 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2d4408694cfe7133bbfd49bcbbf3218e9a19669fa572bb41cdf50216aa2d8cc8"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.185515 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-bf35-account-create-update-5fmjn"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.187451 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-create-6txbs" event={"ID":"5209837f-e8f0-4db4-ba7d-2af20947be50","Type":"ContainerDied","Data":"2009e2e95079eca29831f8eb82297002d5a3f4d9c69b3a52e43a5e5c7be9c339"}
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.187485 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2009e2e95079eca29831f8eb82297002d5a3f4d9c69b3a52e43a5e5c7be9c339"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.187466 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-create-6txbs"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.189523 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-create-ssvs5" event={"ID":"9aee3a36-f57d-4c81-9627-df1f4ea436be","Type":"ContainerDied","Data":"eae4838456cce7d6da19ff56867f196a2816cb555531ff1ba118676fe0c8e0cf"}
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.189566 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eae4838456cce7d6da19ff56867f196a2816cb555531ff1ba118676fe0c8e0cf"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.189750 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-create-ssvs5"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.199658 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-0b2d-account-create-update-89tcd" event={"ID":"ea9126a1-ef8a-42ae-a94c-596b287fa74d","Type":"ContainerDied","Data":"e6629383cc1372abc39c16592e82152d19a71696b95b1c15f7bedcf90e7c4cd8"}
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.199704 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e6629383cc1372abc39c16592e82152d19a71696b95b1c15f7bedcf90e7c4cd8"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.199765 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-0b2d-account-create-update-89tcd"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.210824 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-07f9-account-create-update-jcqf5" event={"ID":"919d9712-f294-46aa-95e9-b1c166d3bf5a","Type":"ContainerDied","Data":"e107bd53731bec57bb34975c16705da14b78c0541774589cf313551b40dc3a97"}
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.210877 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e107bd53731bec57bb34975c16705da14b78c0541774589cf313551b40dc3a97"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.210882 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-07f9-account-create-update-jcqf5"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.212637 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-db-sync-ghqpp" event={"ID":"0ec59b22-f0c6-4de1-a447-59eb40a7c89d","Type":"ContainerDied","Data":"320705fb153610b60b8b92073531f367de5c2b21c0e53d556eef64ab5df80098"}
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.212683 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="320705fb153610b60b8b92073531f367de5c2b21c0e53d556eef64ab5df80098"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.212712 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-db-sync-ghqpp"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.217054 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-db-sync-dmt4c" podStartSLOduration=2.608133891 podStartE2EDuration="7.217031301s" podCreationTimestamp="2025-12-05 17:50:41 +0000 UTC" firstStartedPulling="2025-12-05 17:50:42.911988861 +0000 UTC m=+1048.973139334" lastFinishedPulling="2025-12-05 17:50:47.520886271 +0000 UTC m=+1053.582036744" observedRunningTime="2025-12-05 17:50:48.203385956 +0000 UTC m=+1054.264536469" watchObservedRunningTime="2025-12-05 17:50:48.217031301 +0000 UTC m=+1054.278181784"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.612920 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-74dc88fc-2sqmx"]
Dec 05 17:50:48 crc kubenswrapper[4961]: E1205 17:50:48.613245 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9aee3a36-f57d-4c81-9627-df1f4ea436be" containerName="mariadb-database-create"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.613262 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="9aee3a36-f57d-4c81-9627-df1f4ea436be" containerName="mariadb-database-create"
Dec 05 17:50:48 crc kubenswrapper[4961]: E1205 17:50:48.613273 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5209837f-e8f0-4db4-ba7d-2af20947be50" containerName="mariadb-database-create"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.613282 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="5209837f-e8f0-4db4-ba7d-2af20947be50" containerName="mariadb-database-create"
Dec 05 17:50:48 crc kubenswrapper[4961]: E1205 17:50:48.613301 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82" containerName="mariadb-database-create"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.613307 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82" containerName="mariadb-database-create"
Dec 05 17:50:48 crc kubenswrapper[4961]: E1205 17:50:48.613319 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0ec59b22-f0c6-4de1-a447-59eb40a7c89d" containerName="glance-db-sync"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.613325 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="0ec59b22-f0c6-4de1-a447-59eb40a7c89d" containerName="glance-db-sync"
Dec 05 17:50:48 crc kubenswrapper[4961]: E1205 17:50:48.613341 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2673794d-8d1a-4b28-9988-65c916ae70b0" containerName="mariadb-account-create-update"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.613347 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="2673794d-8d1a-4b28-9988-65c916ae70b0" containerName="mariadb-account-create-update"
Dec 05 17:50:48 crc kubenswrapper[4961]: E1205 17:50:48.613359 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="919d9712-f294-46aa-95e9-b1c166d3bf5a" containerName="mariadb-account-create-update"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.613365 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="919d9712-f294-46aa-95e9-b1c166d3bf5a" containerName="mariadb-account-create-update"
Dec 05 17:50:48 crc kubenswrapper[4961]: E1205 17:50:48.613378 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea9126a1-ef8a-42ae-a94c-596b287fa74d" containerName="mariadb-account-create-update"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.613384 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea9126a1-ef8a-42ae-a94c-596b287fa74d" containerName="mariadb-account-create-update"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.613519 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea9126a1-ef8a-42ae-a94c-596b287fa74d" containerName="mariadb-account-create-update"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.613532 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="0ec59b22-f0c6-4de1-a447-59eb40a7c89d" containerName="glance-db-sync"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.613539 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82" containerName="mariadb-database-create"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.613550 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="2673794d-8d1a-4b28-9988-65c916ae70b0" containerName="mariadb-account-create-update"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.613559 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="919d9712-f294-46aa-95e9-b1c166d3bf5a" containerName="mariadb-account-create-update"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.613567 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="9aee3a36-f57d-4c81-9627-df1f4ea436be" containerName="mariadb-database-create"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.613581 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="5209837f-e8f0-4db4-ba7d-2af20947be50" containerName="mariadb-database-create"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.614465 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74dc88fc-2sqmx"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.635421 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74dc88fc-2sqmx"]
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.712086 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/06c57cab-2995-4dd3-bd4a-9cee5adeb528-ovsdbserver-nb\") pod \"dnsmasq-dns-74dc88fc-2sqmx\" (UID: \"06c57cab-2995-4dd3-bd4a-9cee5adeb528\") " pod="openstack/dnsmasq-dns-74dc88fc-2sqmx"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.712145 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s7l55\" (UniqueName: \"kubernetes.io/projected/06c57cab-2995-4dd3-bd4a-9cee5adeb528-kube-api-access-s7l55\") pod \"dnsmasq-dns-74dc88fc-2sqmx\" (UID: \"06c57cab-2995-4dd3-bd4a-9cee5adeb528\") " pod="openstack/dnsmasq-dns-74dc88fc-2sqmx"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.712269 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/06c57cab-2995-4dd3-bd4a-9cee5adeb528-dns-svc\") pod \"dnsmasq-dns-74dc88fc-2sqmx\" (UID: \"06c57cab-2995-4dd3-bd4a-9cee5adeb528\") " pod="openstack/dnsmasq-dns-74dc88fc-2sqmx"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.712311 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06c57cab-2995-4dd3-bd4a-9cee5adeb528-config\") pod \"dnsmasq-dns-74dc88fc-2sqmx\" (UID: \"06c57cab-2995-4dd3-bd4a-9cee5adeb528\") " pod="openstack/dnsmasq-dns-74dc88fc-2sqmx"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.712337 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/06c57cab-2995-4dd3-bd4a-9cee5adeb528-ovsdbserver-sb\") pod \"dnsmasq-dns-74dc88fc-2sqmx\" (UID: \"06c57cab-2995-4dd3-bd4a-9cee5adeb528\") " pod="openstack/dnsmasq-dns-74dc88fc-2sqmx"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.813603 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/06c57cab-2995-4dd3-bd4a-9cee5adeb528-dns-svc\") pod \"dnsmasq-dns-74dc88fc-2sqmx\" (UID: \"06c57cab-2995-4dd3-bd4a-9cee5adeb528\") " pod="openstack/dnsmasq-dns-74dc88fc-2sqmx"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.814003 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06c57cab-2995-4dd3-bd4a-9cee5adeb528-config\") pod \"dnsmasq-dns-74dc88fc-2sqmx\" (UID: \"06c57cab-2995-4dd3-bd4a-9cee5adeb528\") " pod="openstack/dnsmasq-dns-74dc88fc-2sqmx"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.814049 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/06c57cab-2995-4dd3-bd4a-9cee5adeb528-ovsdbserver-sb\") pod \"dnsmasq-dns-74dc88fc-2sqmx\" (UID: \"06c57cab-2995-4dd3-bd4a-9cee5adeb528\") " pod="openstack/dnsmasq-dns-74dc88fc-2sqmx"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.814101 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/06c57cab-2995-4dd3-bd4a-9cee5adeb528-ovsdbserver-nb\") pod \"dnsmasq-dns-74dc88fc-2sqmx\" (UID: \"06c57cab-2995-4dd3-bd4a-9cee5adeb528\") " pod="openstack/dnsmasq-dns-74dc88fc-2sqmx"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.814152 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s7l55\" (UniqueName: \"kubernetes.io/projected/06c57cab-2995-4dd3-bd4a-9cee5adeb528-kube-api-access-s7l55\") pod \"dnsmasq-dns-74dc88fc-2sqmx\" (UID: \"06c57cab-2995-4dd3-bd4a-9cee5adeb528\") " pod="openstack/dnsmasq-dns-74dc88fc-2sqmx"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.814762 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/06c57cab-2995-4dd3-bd4a-9cee5adeb528-dns-svc\") pod \"dnsmasq-dns-74dc88fc-2sqmx\" (UID: \"06c57cab-2995-4dd3-bd4a-9cee5adeb528\") " pod="openstack/dnsmasq-dns-74dc88fc-2sqmx"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.814876 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06c57cab-2995-4dd3-bd4a-9cee5adeb528-config\") pod \"dnsmasq-dns-74dc88fc-2sqmx\" (UID: \"06c57cab-2995-4dd3-bd4a-9cee5adeb528\") " pod="openstack/dnsmasq-dns-74dc88fc-2sqmx"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.815232 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/06c57cab-2995-4dd3-bd4a-9cee5adeb528-ovsdbserver-nb\") pod \"dnsmasq-dns-74dc88fc-2sqmx\" (UID: \"06c57cab-2995-4dd3-bd4a-9cee5adeb528\") " pod="openstack/dnsmasq-dns-74dc88fc-2sqmx"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.815904 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/06c57cab-2995-4dd3-bd4a-9cee5adeb528-ovsdbserver-sb\") pod \"dnsmasq-dns-74dc88fc-2sqmx\" (UID: \"06c57cab-2995-4dd3-bd4a-9cee5adeb528\") " pod="openstack/dnsmasq-dns-74dc88fc-2sqmx"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.839094 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s7l55\" (UniqueName: \"kubernetes.io/projected/06c57cab-2995-4dd3-bd4a-9cee5adeb528-kube-api-access-s7l55\") pod \"dnsmasq-dns-74dc88fc-2sqmx\" (UID: \"06c57cab-2995-4dd3-bd4a-9cee5adeb528\") " pod="openstack/dnsmasq-dns-74dc88fc-2sqmx"
Dec 05 17:50:48 crc kubenswrapper[4961]: I1205 17:50:48.939410 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74dc88fc-2sqmx"
Dec 05 17:50:49 crc kubenswrapper[4961]: I1205 17:50:49.386812 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-74dc88fc-2sqmx"]
Dec 05 17:50:49 crc kubenswrapper[4961]: W1205 17:50:49.388831 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod06c57cab_2995_4dd3_bd4a_9cee5adeb528.slice/crio-c4ad4535e2fc88b75c0138516a939f67ab6937d08b62394b937f3563bf34d0a3 WatchSource:0}: Error finding container c4ad4535e2fc88b75c0138516a939f67ab6937d08b62394b937f3563bf34d0a3: Status 404 returned error can't find the container with id c4ad4535e2fc88b75c0138516a939f67ab6937d08b62394b937f3563bf34d0a3
Dec 05 17:50:50 crc kubenswrapper[4961]: I1205 17:50:50.235843 4961 generic.go:334] "Generic (PLEG): container finished" podID="06c57cab-2995-4dd3-bd4a-9cee5adeb528" containerID="6cddd34349518ac198931cd6a455617115cc24a446be8b3fea08c1864a956f6a" exitCode=0
Dec 05 17:50:50 crc kubenswrapper[4961]: I1205 17:50:50.235927 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74dc88fc-2sqmx" event={"ID":"06c57cab-2995-4dd3-bd4a-9cee5adeb528","Type":"ContainerDied","Data":"6cddd34349518ac198931cd6a455617115cc24a446be8b3fea08c1864a956f6a"}
Dec 05 17:50:50 crc kubenswrapper[4961]: I1205 17:50:50.235975 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74dc88fc-2sqmx" event={"ID":"06c57cab-2995-4dd3-bd4a-9cee5adeb528","Type":"ContainerStarted","Data":"c4ad4535e2fc88b75c0138516a939f67ab6937d08b62394b937f3563bf34d0a3"}
Dec 05 17:50:51 crc kubenswrapper[4961]: I1205 17:50:51.245710 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74dc88fc-2sqmx" event={"ID":"06c57cab-2995-4dd3-bd4a-9cee5adeb528","Type":"ContainerStarted","Data":"1a46393f02e3a85988c0e741e5fccab1ce0e88c97a7415a499ca6bccbb072e17"}
Dec 05 17:50:51 crc kubenswrapper[4961]: I1205 17:50:51.246149 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-74dc88fc-2sqmx"
Dec 05 17:50:51 crc kubenswrapper[4961]: I1205 17:50:51.279986 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-74dc88fc-2sqmx" podStartSLOduration=3.279918537 podStartE2EDuration="3.279918537s" podCreationTimestamp="2025-12-05 17:50:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:50:51.268565118 +0000 UTC m=+1057.329715651" watchObservedRunningTime="2025-12-05 17:50:51.279918537 +0000 UTC m=+1057.341069010"
Dec 05 17:50:52 crc kubenswrapper[4961]: I1205 17:50:52.255365 4961 generic.go:334] "Generic (PLEG): container finished" podID="eaedc2cc-4238-4831-ad1a-1c260e8bec24" containerID="da9a97b4359f1460049f4a5cc7ea0b44b1acfd3a1a9492323cae3d97df6ea6ea" exitCode=0
Dec 05 17:50:52 crc kubenswrapper[4961]: I1205 17:50:52.255427 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-dmt4c" event={"ID":"eaedc2cc-4238-4831-ad1a-1c260e8bec24","Type":"ContainerDied","Data":"da9a97b4359f1460049f4a5cc7ea0b44b1acfd3a1a9492323cae3d97df6ea6ea"}
Dec 05 17:50:53 crc kubenswrapper[4961]: I1205 17:50:53.575373 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-dmt4c"
Dec 05 17:50:53 crc kubenswrapper[4961]: I1205 17:50:53.590189 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kjxn2\" (UniqueName: \"kubernetes.io/projected/eaedc2cc-4238-4831-ad1a-1c260e8bec24-kube-api-access-kjxn2\") pod \"eaedc2cc-4238-4831-ad1a-1c260e8bec24\" (UID: \"eaedc2cc-4238-4831-ad1a-1c260e8bec24\") "
Dec 05 17:50:53 crc kubenswrapper[4961]: I1205 17:50:53.590344 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eaedc2cc-4238-4831-ad1a-1c260e8bec24-combined-ca-bundle\") pod \"eaedc2cc-4238-4831-ad1a-1c260e8bec24\" (UID: \"eaedc2cc-4238-4831-ad1a-1c260e8bec24\") "
Dec 05 17:50:53 crc kubenswrapper[4961]: I1205 17:50:53.590468 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eaedc2cc-4238-4831-ad1a-1c260e8bec24-config-data\") pod \"eaedc2cc-4238-4831-ad1a-1c260e8bec24\" (UID: \"eaedc2cc-4238-4831-ad1a-1c260e8bec24\") "
Dec 05 17:50:53 crc kubenswrapper[4961]: I1205 17:50:53.598234 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eaedc2cc-4238-4831-ad1a-1c260e8bec24-kube-api-access-kjxn2" (OuterVolumeSpecName: "kube-api-access-kjxn2") pod "eaedc2cc-4238-4831-ad1a-1c260e8bec24" (UID: "eaedc2cc-4238-4831-ad1a-1c260e8bec24"). InnerVolumeSpecName "kube-api-access-kjxn2". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:50:53 crc kubenswrapper[4961]: I1205 17:50:53.622554 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eaedc2cc-4238-4831-ad1a-1c260e8bec24-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "eaedc2cc-4238-4831-ad1a-1c260e8bec24" (UID: "eaedc2cc-4238-4831-ad1a-1c260e8bec24"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:50:53 crc kubenswrapper[4961]: I1205 17:50:53.663502 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eaedc2cc-4238-4831-ad1a-1c260e8bec24-config-data" (OuterVolumeSpecName: "config-data") pod "eaedc2cc-4238-4831-ad1a-1c260e8bec24" (UID: "eaedc2cc-4238-4831-ad1a-1c260e8bec24"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:50:53 crc kubenswrapper[4961]: I1205 17:50:53.692822 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/eaedc2cc-4238-4831-ad1a-1c260e8bec24-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 17:50:53 crc kubenswrapper[4961]: I1205 17:50:53.692865 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/eaedc2cc-4238-4831-ad1a-1c260e8bec24-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 17:50:53 crc kubenswrapper[4961]: I1205 17:50:53.692882 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kjxn2\" (UniqueName: \"kubernetes.io/projected/eaedc2cc-4238-4831-ad1a-1c260e8bec24-kube-api-access-kjxn2\") on node \"crc\" DevicePath \"\""
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.270445 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-db-sync-dmt4c" event={"ID":"eaedc2cc-4238-4831-ad1a-1c260e8bec24","Type":"ContainerDied","Data":"c50f847bfa000b30de1044d871bfefaa7d7d38a86aa73f6574b9ec4c9bbef78e"}
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.270488 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-db-sync-dmt4c"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.270500 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="c50f847bfa000b30de1044d871bfefaa7d7d38a86aa73f6574b9ec4c9bbef78e"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.528145 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74dc88fc-2sqmx"]
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.528702 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-74dc88fc-2sqmx" podUID="06c57cab-2995-4dd3-bd4a-9cee5adeb528" containerName="dnsmasq-dns" containerID="cri-o://1a46393f02e3a85988c0e741e5fccab1ce0e88c97a7415a499ca6bccbb072e17" gracePeriod=10
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.565146 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-rxkp9"]
Dec 05 17:50:54 crc kubenswrapper[4961]: E1205 17:50:54.565585 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaedc2cc-4238-4831-ad1a-1c260e8bec24" containerName="keystone-db-sync"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.565613 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="eaedc2cc-4238-4831-ad1a-1c260e8bec24" containerName="keystone-db-sync"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.565919 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="eaedc2cc-4238-4831-ad1a-1c260e8bec24" containerName="keystone-db-sync"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.566583 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-rxkp9"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.572804 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.576065 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.576161 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.576434 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.578454 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-rxkp9"]
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.588096 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-pp6pr"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.594525 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-7d5679f497-nfk5t"]
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.602520 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7d5679f497-nfk5t"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.607762 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-combined-ca-bundle\") pod \"keystone-bootstrap-rxkp9\" (UID: \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\") " pod="openstack/keystone-bootstrap-rxkp9"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.607832 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-scripts\") pod \"keystone-bootstrap-rxkp9\" (UID: \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\") " pod="openstack/keystone-bootstrap-rxkp9"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.607930 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-credential-keys\") pod \"keystone-bootstrap-rxkp9\" (UID: \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\") " pod="openstack/keystone-bootstrap-rxkp9"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.607972 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-fernet-keys\") pod \"keystone-bootstrap-rxkp9\" (UID: \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\") " pod="openstack/keystone-bootstrap-rxkp9"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.608031 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s4xj7\" (UniqueName: \"kubernetes.io/projected/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-kube-api-access-s4xj7\") pod \"keystone-bootstrap-rxkp9\" (UID: \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\") " pod="openstack/keystone-bootstrap-rxkp9"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.608077 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-config-data\") pod \"keystone-bootstrap-rxkp9\" (UID: \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\") " pod="openstack/keystone-bootstrap-rxkp9"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.627834 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-7d5679f497-nfk5t"]
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.716759 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da25d01c-6513-43e2-aa19-5c380434494c-config\") pod \"dnsmasq-dns-7d5679f497-nfk5t\" (UID: \"da25d01c-6513-43e2-aa19-5c380434494c\") " pod="openstack/dnsmasq-dns-7d5679f497-nfk5t"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.716860 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s4xj7\" (UniqueName: \"kubernetes.io/projected/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-kube-api-access-s4xj7\") pod \"keystone-bootstrap-rxkp9\" (UID: \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\") " pod="openstack/keystone-bootstrap-rxkp9"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.716896 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/da25d01c-6513-43e2-aa19-5c380434494c-ovsdbserver-nb\") pod \"dnsmasq-dns-7d5679f497-nfk5t\" (UID: \"da25d01c-6513-43e2-aa19-5c380434494c\") " pod="openstack/dnsmasq-dns-7d5679f497-nfk5t"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.716929 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-config-data\") pod \"keystone-bootstrap-rxkp9\" (UID: \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\") " pod="openstack/keystone-bootstrap-rxkp9"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.716953 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jcpbj\" (UniqueName: \"kubernetes.io/projected/da25d01c-6513-43e2-aa19-5c380434494c-kube-api-access-jcpbj\") pod \"dnsmasq-dns-7d5679f497-nfk5t\" (UID: \"da25d01c-6513-43e2-aa19-5c380434494c\") " pod="openstack/dnsmasq-dns-7d5679f497-nfk5t"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.717200 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-combined-ca-bundle\") pod \"keystone-bootstrap-rxkp9\" (UID: \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\") " pod="openstack/keystone-bootstrap-rxkp9"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.717236 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-scripts\") pod \"keystone-bootstrap-rxkp9\" (UID: \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\") " pod="openstack/keystone-bootstrap-rxkp9"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.717381 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-credential-keys\") pod \"keystone-bootstrap-rxkp9\" (UID: \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\") " pod="openstack/keystone-bootstrap-rxkp9"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.717409 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/da25d01c-6513-43e2-aa19-5c380434494c-dns-svc\") pod \"dnsmasq-dns-7d5679f497-nfk5t\" (UID: \"da25d01c-6513-43e2-aa19-5c380434494c\") " pod="openstack/dnsmasq-dns-7d5679f497-nfk5t"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.717453 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-fernet-keys\") pod \"keystone-bootstrap-rxkp9\" (UID: \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\") " pod="openstack/keystone-bootstrap-rxkp9"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.717496 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/da25d01c-6513-43e2-aa19-5c380434494c-ovsdbserver-sb\") pod \"dnsmasq-dns-7d5679f497-nfk5t\" (UID: \"da25d01c-6513-43e2-aa19-5c380434494c\") " pod="openstack/dnsmasq-dns-7d5679f497-nfk5t"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.723478 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-credential-keys\") pod \"keystone-bootstrap-rxkp9\" (UID: \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\") " pod="openstack/keystone-bootstrap-rxkp9"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.728727 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-scripts\") pod \"keystone-bootstrap-rxkp9\" (UID: \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\") " pod="openstack/keystone-bootstrap-rxkp9"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.738436 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-combined-ca-bundle\") pod \"keystone-bootstrap-rxkp9\" (UID: \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\") " pod="openstack/keystone-bootstrap-rxkp9"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.742618 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-fernet-keys\") pod \"keystone-bootstrap-rxkp9\" (UID: \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\") " pod="openstack/keystone-bootstrap-rxkp9"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.749206 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-config-data\") pod \"keystone-bootstrap-rxkp9\" (UID: \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\") " pod="openstack/keystone-bootstrap-rxkp9"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.760691 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-d5754c6c5-dtppz"]
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.763328 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-d5754c6c5-dtppz"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.783196 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-config-data"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.783217 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.783404 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"horizon-scripts"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.808201 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"horizon-horizon-dockercfg-vzmg9"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.818657 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1004621a-ebc0-4b35-9a52-07089f68e790-horizon-secret-key\") pod \"horizon-d5754c6c5-dtppz\" (UID: \"1004621a-ebc0-4b35-9a52-07089f68e790\") " pod="openstack/horizon-d5754c6c5-dtppz"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.818722 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/da25d01c-6513-43e2-aa19-5c380434494c-ovsdbserver-nb\") pod \"dnsmasq-dns-7d5679f497-nfk5t\" (UID: \"da25d01c-6513-43e2-aa19-5c380434494c\") " pod="openstack/dnsmasq-dns-7d5679f497-nfk5t"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.818748 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jcpbj\" (UniqueName: \"kubernetes.io/projected/da25d01c-6513-43e2-aa19-5c380434494c-kube-api-access-jcpbj\") pod \"dnsmasq-dns-7d5679f497-nfk5t\" (UID: \"da25d01c-6513-43e2-aa19-5c380434494c\") " pod="openstack/dnsmasq-dns-7d5679f497-nfk5t"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.818819 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1004621a-ebc0-4b35-9a52-07089f68e790-scripts\") pod \"horizon-d5754c6c5-dtppz\" (UID: \"1004621a-ebc0-4b35-9a52-07089f68e790\") " pod="openstack/horizon-d5754c6c5-dtppz"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.818855 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bf249\" (UniqueName: \"kubernetes.io/projected/1004621a-ebc0-4b35-9a52-07089f68e790-kube-api-access-bf249\") pod \"horizon-d5754c6c5-dtppz\" (UID: \"1004621a-ebc0-4b35-9a52-07089f68e790\") " pod="openstack/horizon-d5754c6c5-dtppz"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.818894 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1004621a-ebc0-4b35-9a52-07089f68e790-logs\") pod \"horizon-d5754c6c5-dtppz\" (UID: \"1004621a-ebc0-4b35-9a52-07089f68e790\") " pod="openstack/horizon-d5754c6c5-dtppz"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.818915 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/da25d01c-6513-43e2-aa19-5c380434494c-dns-svc\") pod \"dnsmasq-dns-7d5679f497-nfk5t\" (UID: \"da25d01c-6513-43e2-aa19-5c380434494c\") " pod="openstack/dnsmasq-dns-7d5679f497-nfk5t"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.818945 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/da25d01c-6513-43e2-aa19-5c380434494c-ovsdbserver-sb\") pod \"dnsmasq-dns-7d5679f497-nfk5t\" (UID: \"da25d01c-6513-43e2-aa19-5c380434494c\") " pod="openstack/dnsmasq-dns-7d5679f497-nfk5t"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.818959 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da25d01c-6513-43e2-aa19-5c380434494c-config\") pod \"dnsmasq-dns-7d5679f497-nfk5t\" (UID: \"da25d01c-6513-43e2-aa19-5c380434494c\") " pod="openstack/dnsmasq-dns-7d5679f497-nfk5t"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.818992 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1004621a-ebc0-4b35-9a52-07089f68e790-config-data\") pod \"horizon-d5754c6c5-dtppz\" (UID: \"1004621a-ebc0-4b35-9a52-07089f68e790\") " pod="openstack/horizon-d5754c6c5-dtppz"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.820122 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/da25d01c-6513-43e2-aa19-5c380434494c-ovsdbserver-nb\") pod \"dnsmasq-dns-7d5679f497-nfk5t\" (UID: \"da25d01c-6513-43e2-aa19-5c380434494c\") " pod="openstack/dnsmasq-dns-7d5679f497-nfk5t"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.820737 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/da25d01c-6513-43e2-aa19-5c380434494c-dns-svc\") pod \"dnsmasq-dns-7d5679f497-nfk5t\" (UID: \"da25d01c-6513-43e2-aa19-5c380434494c\") " pod="openstack/dnsmasq-dns-7d5679f497-nfk5t"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.820870 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/da25d01c-6513-43e2-aa19-5c380434494c-ovsdbserver-sb\") pod \"dnsmasq-dns-7d5679f497-nfk5t\" (UID: \"da25d01c-6513-43e2-aa19-5c380434494c\") " pod="openstack/dnsmasq-dns-7d5679f497-nfk5t"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.822900 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da25d01c-6513-43e2-aa19-5c380434494c-config\") pod \"dnsmasq-dns-7d5679f497-nfk5t\" (UID: \"da25d01c-6513-43e2-aa19-5c380434494c\") " pod="openstack/dnsmasq-dns-7d5679f497-nfk5t"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.826659 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s4xj7\" (UniqueName: \"kubernetes.io/projected/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-kube-api-access-s4xj7\") pod \"keystone-bootstrap-rxkp9\" (UID: \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\") " pod="openstack/keystone-bootstrap-rxkp9"
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.836861 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-d5754c6c5-dtppz"]
Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.891296 4961 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openstack/keystone-bootstrap-rxkp9" Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.898275 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jcpbj\" (UniqueName: \"kubernetes.io/projected/da25d01c-6513-43e2-aa19-5c380434494c-kube-api-access-jcpbj\") pod \"dnsmasq-dns-7d5679f497-nfk5t\" (UID: \"da25d01c-6513-43e2-aa19-5c380434494c\") " pod="openstack/dnsmasq-dns-7d5679f497-nfk5t" Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.920078 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1004621a-ebc0-4b35-9a52-07089f68e790-scripts\") pod \"horizon-d5754c6c5-dtppz\" (UID: \"1004621a-ebc0-4b35-9a52-07089f68e790\") " pod="openstack/horizon-d5754c6c5-dtppz" Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.920180 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bf249\" (UniqueName: \"kubernetes.io/projected/1004621a-ebc0-4b35-9a52-07089f68e790-kube-api-access-bf249\") pod \"horizon-d5754c6c5-dtppz\" (UID: \"1004621a-ebc0-4b35-9a52-07089f68e790\") " pod="openstack/horizon-d5754c6c5-dtppz" Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.920242 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1004621a-ebc0-4b35-9a52-07089f68e790-logs\") pod \"horizon-d5754c6c5-dtppz\" (UID: \"1004621a-ebc0-4b35-9a52-07089f68e790\") " pod="openstack/horizon-d5754c6c5-dtppz" Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.920310 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1004621a-ebc0-4b35-9a52-07089f68e790-config-data\") pod \"horizon-d5754c6c5-dtppz\" (UID: \"1004621a-ebc0-4b35-9a52-07089f68e790\") " pod="openstack/horizon-d5754c6c5-dtppz" Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.920343 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1004621a-ebc0-4b35-9a52-07089f68e790-horizon-secret-key\") pod \"horizon-d5754c6c5-dtppz\" (UID: \"1004621a-ebc0-4b35-9a52-07089f68e790\") " pod="openstack/horizon-d5754c6c5-dtppz" Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.921547 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1004621a-ebc0-4b35-9a52-07089f68e790-logs\") pod \"horizon-d5754c6c5-dtppz\" (UID: \"1004621a-ebc0-4b35-9a52-07089f68e790\") " pod="openstack/horizon-d5754c6c5-dtppz" Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.922080 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1004621a-ebc0-4b35-9a52-07089f68e790-scripts\") pod \"horizon-d5754c6c5-dtppz\" (UID: \"1004621a-ebc0-4b35-9a52-07089f68e790\") " pod="openstack/horizon-d5754c6c5-dtppz" Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.923842 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1004621a-ebc0-4b35-9a52-07089f68e790-config-data\") pod \"horizon-d5754c6c5-dtppz\" (UID: \"1004621a-ebc0-4b35-9a52-07089f68e790\") " pod="openstack/horizon-d5754c6c5-dtppz" Dec 05 17:50:54 crc kubenswrapper[4961]: I1205 17:50:54.936340 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1004621a-ebc0-4b35-9a52-07089f68e790-horizon-secret-key\") pod \"horizon-d5754c6c5-dtppz\" (UID: \"1004621a-ebc0-4b35-9a52-07089f68e790\") " pod="openstack/horizon-d5754c6c5-dtppz" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.013361 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7d5679f497-nfk5t" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.024550 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-db-sync-sd82p"] Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.036988 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-sd82p" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.083903 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.084103 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.084228 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-99cns" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.087827 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-sd82p"] Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.089652 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bf249\" (UniqueName: \"kubernetes.io/projected/1004621a-ebc0-4b35-9a52-07089f68e790-kube-api-access-bf249\") pod \"horizon-d5754c6c5-dtppz\" (UID: \"1004621a-ebc0-4b35-9a52-07089f68e790\") " pod="openstack/horizon-d5754c6c5-dtppz" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.127520 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-db-sync-pvww2"] Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.127933 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jvz5q\" (UniqueName: \"kubernetes.io/projected/b58877bc-157d-4919-9418-e5b306dff028-kube-api-access-jvz5q\") pod \"neutron-db-sync-sd82p\" (UID: \"b58877bc-157d-4919-9418-e5b306dff028\") " pod="openstack/neutron-db-sync-sd82p" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.128051 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b58877bc-157d-4919-9418-e5b306dff028-combined-ca-bundle\") pod \"neutron-db-sync-sd82p\" (UID: \"b58877bc-157d-4919-9418-e5b306dff028\") " pod="openstack/neutron-db-sync-sd82p" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.128099 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b58877bc-157d-4919-9418-e5b306dff028-config\") pod \"neutron-db-sync-sd82p\" (UID: \"b58877bc-157d-4919-9418-e5b306dff028\") " pod="openstack/neutron-db-sync-sd82p" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.128966 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-db-sync-pvww2" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.141160 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.152043 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.157728 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-pvww2"] Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.158551 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-w6tdz" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.221598 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.230706 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/62cd878c-721b-46b4-87bb-1573a9fcf6d9-db-sync-config-data\") pod \"cinder-db-sync-pvww2\" (UID: \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\") " pod="openstack/cinder-db-sync-pvww2" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.230786 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/62cd878c-721b-46b4-87bb-1573a9fcf6d9-etc-machine-id\") pod \"cinder-db-sync-pvww2\" (UID: \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\") " pod="openstack/cinder-db-sync-pvww2" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.230823 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b58877bc-157d-4919-9418-e5b306dff028-combined-ca-bundle\") pod \"neutron-db-sync-sd82p\" (UID: \"b58877bc-157d-4919-9418-e5b306dff028\") " pod="openstack/neutron-db-sync-sd82p" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.230845 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62cd878c-721b-46b4-87bb-1573a9fcf6d9-combined-ca-bundle\") pod \"cinder-db-sync-pvww2\" (UID: \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\") " pod="openstack/cinder-db-sync-pvww2" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.230871 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62cd878c-721b-46b4-87bb-1573a9fcf6d9-config-data\") pod \"cinder-db-sync-pvww2\" (UID: \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\") " pod="openstack/cinder-db-sync-pvww2" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.230894 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62cd878c-721b-46b4-87bb-1573a9fcf6d9-scripts\") pod \"cinder-db-sync-pvww2\" (UID: \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\") " pod="openstack/cinder-db-sync-pvww2" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.230918 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b58877bc-157d-4919-9418-e5b306dff028-config\") pod \"neutron-db-sync-sd82p\" (UID: \"b58877bc-157d-4919-9418-e5b306dff028\") " pod="openstack/neutron-db-sync-sd82p" Dec 05 
17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.230971 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jvz5q\" (UniqueName: \"kubernetes.io/projected/b58877bc-157d-4919-9418-e5b306dff028-kube-api-access-jvz5q\") pod \"neutron-db-sync-sd82p\" (UID: \"b58877bc-157d-4919-9418-e5b306dff028\") " pod="openstack/neutron-db-sync-sd82p" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.231020 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rjqbl\" (UniqueName: \"kubernetes.io/projected/62cd878c-721b-46b4-87bb-1573a9fcf6d9-kube-api-access-rjqbl\") pod \"cinder-db-sync-pvww2\" (UID: \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\") " pod="openstack/cinder-db-sync-pvww2" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.233986 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.234089 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b58877bc-157d-4919-9418-e5b306dff028-combined-ca-bundle\") pod \"neutron-db-sync-sd82p\" (UID: \"b58877bc-157d-4919-9418-e5b306dff028\") " pod="openstack/neutron-db-sync-sd82p" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.237080 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/b58877bc-157d-4919-9418-e5b306dff028-config\") pod \"neutron-db-sync-sd82p\" (UID: \"b58877bc-157d-4919-9418-e5b306dff028\") " pod="openstack/neutron-db-sync-sd82p" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.245168 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.245375 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.254343 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-db-sync-k7c2f"] Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.255469 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-k7c2f" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.268707 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-config-data" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.268866 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-placement-dockercfg-c9x4h" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.268973 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"placement-scripts" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.286977 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-d5754c6c5-dtppz" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.320283 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jvz5q\" (UniqueName: \"kubernetes.io/projected/b58877bc-157d-4919-9418-e5b306dff028-kube-api-access-jvz5q\") pod \"neutron-db-sync-sd82p\" (UID: \"b58877bc-157d-4919-9418-e5b306dff028\") " pod="openstack/neutron-db-sync-sd82p" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.332337 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/62cd878c-721b-46b4-87bb-1573a9fcf6d9-etc-machine-id\") pod \"cinder-db-sync-pvww2\" (UID: \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\") " pod="openstack/cinder-db-sync-pvww2" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.332379 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " pod="openstack/ceilometer-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.332401 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/125fc58a-f251-4b81-98e7-eca6a2c72a8e-combined-ca-bundle\") pod \"placement-db-sync-k7c2f\" (UID: \"125fc58a-f251-4b81-98e7-eca6a2c72a8e\") " pod="openstack/placement-db-sync-k7c2f" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.332422 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " pod="openstack/ceilometer-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.332442 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/125fc58a-f251-4b81-98e7-eca6a2c72a8e-config-data\") pod \"placement-db-sync-k7c2f\" (UID: \"125fc58a-f251-4b81-98e7-eca6a2c72a8e\") " pod="openstack/placement-db-sync-k7c2f" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.332462 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62cd878c-721b-46b4-87bb-1573a9fcf6d9-combined-ca-bundle\") pod \"cinder-db-sync-pvww2\" (UID: \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\") " pod="openstack/cinder-db-sync-pvww2" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.332490 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62cd878c-721b-46b4-87bb-1573a9fcf6d9-config-data\") pod \"cinder-db-sync-pvww2\" (UID: \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\") " pod="openstack/cinder-db-sync-pvww2" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.332515 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-log-httpd\") pod \"ceilometer-0\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " pod="openstack/ceilometer-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.332529 4961 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62cd878c-721b-46b4-87bb-1573a9fcf6d9-scripts\") pod \"cinder-db-sync-pvww2\" (UID: \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\") " pod="openstack/cinder-db-sync-pvww2" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.332556 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/125fc58a-f251-4b81-98e7-eca6a2c72a8e-scripts\") pod \"placement-db-sync-k7c2f\" (UID: \"125fc58a-f251-4b81-98e7-eca6a2c72a8e\") " pod="openstack/placement-db-sync-k7c2f" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.332572 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-run-httpd\") pod \"ceilometer-0\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " pod="openstack/ceilometer-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.332592 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-scripts\") pod \"ceilometer-0\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " pod="openstack/ceilometer-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.332617 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ltz74\" (UniqueName: \"kubernetes.io/projected/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-kube-api-access-ltz74\") pod \"ceilometer-0\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " pod="openstack/ceilometer-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.332639 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/125fc58a-f251-4b81-98e7-eca6a2c72a8e-logs\") pod \"placement-db-sync-k7c2f\" (UID: \"125fc58a-f251-4b81-98e7-eca6a2c72a8e\") " pod="openstack/placement-db-sync-k7c2f" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.332657 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-config-data\") pod \"ceilometer-0\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " pod="openstack/ceilometer-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.332700 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rjqbl\" (UniqueName: \"kubernetes.io/projected/62cd878c-721b-46b4-87bb-1573a9fcf6d9-kube-api-access-rjqbl\") pod \"cinder-db-sync-pvww2\" (UID: \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\") " pod="openstack/cinder-db-sync-pvww2" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.332719 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jnvd4\" (UniqueName: \"kubernetes.io/projected/125fc58a-f251-4b81-98e7-eca6a2c72a8e-kube-api-access-jnvd4\") pod \"placement-db-sync-k7c2f\" (UID: \"125fc58a-f251-4b81-98e7-eca6a2c72a8e\") " pod="openstack/placement-db-sync-k7c2f" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.332737 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: 
\"kubernetes.io/secret/62cd878c-721b-46b4-87bb-1573a9fcf6d9-db-sync-config-data\") pod \"cinder-db-sync-pvww2\" (UID: \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\") " pod="openstack/cinder-db-sync-pvww2" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.336122 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/62cd878c-721b-46b4-87bb-1573a9fcf6d9-db-sync-config-data\") pod \"cinder-db-sync-pvww2\" (UID: \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\") " pod="openstack/cinder-db-sync-pvww2" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.336432 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/62cd878c-721b-46b4-87bb-1573a9fcf6d9-etc-machine-id\") pod \"cinder-db-sync-pvww2\" (UID: \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\") " pod="openstack/cinder-db-sync-pvww2" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.347031 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62cd878c-721b-46b4-87bb-1573a9fcf6d9-config-data\") pod \"cinder-db-sync-pvww2\" (UID: \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\") " pod="openstack/cinder-db-sync-pvww2" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.350870 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62cd878c-721b-46b4-87bb-1573a9fcf6d9-combined-ca-bundle\") pod \"cinder-db-sync-pvww2\" (UID: \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\") " pod="openstack/cinder-db-sync-pvww2" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.352222 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62cd878c-721b-46b4-87bb-1573a9fcf6d9-scripts\") pod \"cinder-db-sync-pvww2\" (UID: \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\") " pod="openstack/cinder-db-sync-pvww2" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.352309 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.360972 4961 generic.go:334] "Generic (PLEG): container finished" podID="06c57cab-2995-4dd3-bd4a-9cee5adeb528" containerID="1a46393f02e3a85988c0e741e5fccab1ce0e88c97a7415a499ca6bccbb072e17" exitCode=0 Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.361014 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74dc88fc-2sqmx" event={"ID":"06c57cab-2995-4dd3-bd4a-9cee5adeb528","Type":"ContainerDied","Data":"1a46393f02e3a85988c0e741e5fccab1ce0e88c97a7415a499ca6bccbb072e17"} Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.375600 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-k7c2f"] Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.398314 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rjqbl\" (UniqueName: \"kubernetes.io/projected/62cd878c-721b-46b4-87bb-1573a9fcf6d9-kube-api-access-rjqbl\") pod \"cinder-db-sync-pvww2\" (UID: \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\") " pod="openstack/cinder-db-sync-pvww2" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.398395 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-dd8dcd4b5-hjh8n"] Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.404675 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-dd8dcd4b5-hjh8n" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.421291 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-sd82p" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.427324 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7d5679f497-nfk5t"] Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.435753 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-log-httpd\") pod \"ceilometer-0\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " pod="openstack/ceilometer-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.435822 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-horizon-secret-key\") pod \"horizon-dd8dcd4b5-hjh8n\" (UID: \"e769efa9-1494-4b54-ac9f-97ea6bd0c55d\") " pod="openstack/horizon-dd8dcd4b5-hjh8n" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.435850 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/125fc58a-f251-4b81-98e7-eca6a2c72a8e-scripts\") pod \"placement-db-sync-k7c2f\" (UID: \"125fc58a-f251-4b81-98e7-eca6a2c72a8e\") " pod="openstack/placement-db-sync-k7c2f" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.435868 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-run-httpd\") pod \"ceilometer-0\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " pod="openstack/ceilometer-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.435890 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-scripts\") pod \"ceilometer-0\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " pod="openstack/ceilometer-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.435919 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-scripts\") pod \"horizon-dd8dcd4b5-hjh8n\" (UID: \"e769efa9-1494-4b54-ac9f-97ea6bd0c55d\") " pod="openstack/horizon-dd8dcd4b5-hjh8n" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.435938 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ltz74\" (UniqueName: \"kubernetes.io/projected/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-kube-api-access-ltz74\") pod \"ceilometer-0\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " pod="openstack/ceilometer-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.435961 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/125fc58a-f251-4b81-98e7-eca6a2c72a8e-logs\") pod \"placement-db-sync-k7c2f\" (UID: \"125fc58a-f251-4b81-98e7-eca6a2c72a8e\") " pod="openstack/placement-db-sync-k7c2f" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.435981 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-config-data\") pod \"ceilometer-0\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " pod="openstack/ceilometer-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.436027 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jnvd4\" (UniqueName: \"kubernetes.io/projected/125fc58a-f251-4b81-98e7-eca6a2c72a8e-kube-api-access-jnvd4\") pod \"placement-db-sync-k7c2f\" (UID: \"125fc58a-f251-4b81-98e7-eca6a2c72a8e\") " pod="openstack/placement-db-sync-k7c2f" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.436054 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pzz4j\" (UniqueName: \"kubernetes.io/projected/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-kube-api-access-pzz4j\") pod \"horizon-dd8dcd4b5-hjh8n\" (UID: \"e769efa9-1494-4b54-ac9f-97ea6bd0c55d\") " pod="openstack/horizon-dd8dcd4b5-hjh8n" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.436076 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-logs\") pod \"horizon-dd8dcd4b5-hjh8n\" (UID: \"e769efa9-1494-4b54-ac9f-97ea6bd0c55d\") " pod="openstack/horizon-dd8dcd4b5-hjh8n" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.436092 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " pod="openstack/ceilometer-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.436112 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/125fc58a-f251-4b81-98e7-eca6a2c72a8e-combined-ca-bundle\") pod \"placement-db-sync-k7c2f\" (UID: \"125fc58a-f251-4b81-98e7-eca6a2c72a8e\") " pod="openstack/placement-db-sync-k7c2f" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.436128 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " pod="openstack/ceilometer-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.436149 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/125fc58a-f251-4b81-98e7-eca6a2c72a8e-config-data\") pod \"placement-db-sync-k7c2f\" (UID: \"125fc58a-f251-4b81-98e7-eca6a2c72a8e\") " pod="openstack/placement-db-sync-k7c2f" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.436165 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-config-data\") pod \"horizon-dd8dcd4b5-hjh8n\" (UID: \"e769efa9-1494-4b54-ac9f-97ea6bd0c55d\") " pod="openstack/horizon-dd8dcd4b5-hjh8n" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.444910 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-log-httpd\") pod \"ceilometer-0\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " 
pod="openstack/ceilometer-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.452456 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-config-data\") pod \"ceilometer-0\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " pod="openstack/ceilometer-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.453381 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " pod="openstack/ceilometer-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.456391 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/125fc58a-f251-4b81-98e7-eca6a2c72a8e-scripts\") pod \"placement-db-sync-k7c2f\" (UID: \"125fc58a-f251-4b81-98e7-eca6a2c72a8e\") " pod="openstack/placement-db-sync-k7c2f" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.457765 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-run-httpd\") pod \"ceilometer-0\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " pod="openstack/ceilometer-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.458197 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-pvww2" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.458563 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/125fc58a-f251-4b81-98e7-eca6a2c72a8e-logs\") pod \"placement-db-sync-k7c2f\" (UID: \"125fc58a-f251-4b81-98e7-eca6a2c72a8e\") " pod="openstack/placement-db-sync-k7c2f" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.460616 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " pod="openstack/ceilometer-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.471089 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/125fc58a-f251-4b81-98e7-eca6a2c72a8e-config-data\") pod \"placement-db-sync-k7c2f\" (UID: \"125fc58a-f251-4b81-98e7-eca6a2c72a8e\") " pod="openstack/placement-db-sync-k7c2f" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.473246 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/125fc58a-f251-4b81-98e7-eca6a2c72a8e-combined-ca-bundle\") pod \"placement-db-sync-k7c2f\" (UID: \"125fc58a-f251-4b81-98e7-eca6a2c72a8e\") " pod="openstack/placement-db-sync-k7c2f" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.474810 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-scripts\") pod \"ceilometer-0\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " pod="openstack/ceilometer-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.494914 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-dd8dcd4b5-hjh8n"] Dec 05 17:50:55 crc 
kubenswrapper[4961]: I1205 17:50:55.510047 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ltz74\" (UniqueName: \"kubernetes.io/projected/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-kube-api-access-ltz74\") pod \"ceilometer-0\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " pod="openstack/ceilometer-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.522208 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jnvd4\" (UniqueName: \"kubernetes.io/projected/125fc58a-f251-4b81-98e7-eca6a2c72a8e-kube-api-access-jnvd4\") pod \"placement-db-sync-k7c2f\" (UID: \"125fc58a-f251-4b81-98e7-eca6a2c72a8e\") " pod="openstack/placement-db-sync-k7c2f" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.533147 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-db-sync-9q2wd"] Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.535257 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-9q2wd" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.538155 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-wmfbp" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.540549 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-config-data\") pod \"horizon-dd8dcd4b5-hjh8n\" (UID: \"e769efa9-1494-4b54-ac9f-97ea6bd0c55d\") " pod="openstack/horizon-dd8dcd4b5-hjh8n" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.540626 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-horizon-secret-key\") pod \"horizon-dd8dcd4b5-hjh8n\" (UID: \"e769efa9-1494-4b54-ac9f-97ea6bd0c55d\") " pod="openstack/horizon-dd8dcd4b5-hjh8n" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.541411 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.542824 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-config-data\") pod \"horizon-dd8dcd4b5-hjh8n\" (UID: \"e769efa9-1494-4b54-ac9f-97ea6bd0c55d\") " pod="openstack/horizon-dd8dcd4b5-hjh8n" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.545812 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-scripts\") pod \"horizon-dd8dcd4b5-hjh8n\" (UID: \"e769efa9-1494-4b54-ac9f-97ea6bd0c55d\") " pod="openstack/horizon-dd8dcd4b5-hjh8n" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.546002 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pzz4j\" (UniqueName: \"kubernetes.io/projected/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-kube-api-access-pzz4j\") pod \"horizon-dd8dcd4b5-hjh8n\" (UID: \"e769efa9-1494-4b54-ac9f-97ea6bd0c55d\") " pod="openstack/horizon-dd8dcd4b5-hjh8n" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.546049 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-logs\") pod \"horizon-dd8dcd4b5-hjh8n\" 
(UID: \"e769efa9-1494-4b54-ac9f-97ea6bd0c55d\") " pod="openstack/horizon-dd8dcd4b5-hjh8n" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.546416 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-logs\") pod \"horizon-dd8dcd4b5-hjh8n\" (UID: \"e769efa9-1494-4b54-ac9f-97ea6bd0c55d\") " pod="openstack/horizon-dd8dcd4b5-hjh8n" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.546720 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-scripts\") pod \"horizon-dd8dcd4b5-hjh8n\" (UID: \"e769efa9-1494-4b54-ac9f-97ea6bd0c55d\") " pod="openstack/horizon-dd8dcd4b5-hjh8n" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.549668 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-56798b757f-lrbxr"] Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.550421 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-horizon-secret-key\") pod \"horizon-dd8dcd4b5-hjh8n\" (UID: \"e769efa9-1494-4b54-ac9f-97ea6bd0c55d\") " pod="openstack/horizon-dd8dcd4b5-hjh8n" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.562578 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56798b757f-lrbxr" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.580804 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pzz4j\" (UniqueName: \"kubernetes.io/projected/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-kube-api-access-pzz4j\") pod \"horizon-dd8dcd4b5-hjh8n\" (UID: \"e769efa9-1494-4b54-ac9f-97ea6bd0c55d\") " pod="openstack/horizon-dd8dcd4b5-hjh8n" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.601384 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-9q2wd"] Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.602132 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.607371 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56798b757f-lrbxr"] Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.630890 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.639715 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-k7c2f" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.640364 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.650137 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.650275 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.650476 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-f5xbg" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.650516 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.658326 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.659491 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5c2pv\" (UniqueName: \"kubernetes.io/projected/884f8917-c720-4359-96d2-155b85958898-kube-api-access-5c2pv\") pod \"dnsmasq-dns-56798b757f-lrbxr\" (UID: \"884f8917-c720-4359-96d2-155b85958898\") " pod="openstack/dnsmasq-dns-56798b757f-lrbxr" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.659627 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8sfnr\" (UniqueName: \"kubernetes.io/projected/c3a753a7-88b4-4e0c-a1c6-82e79643c6b0-kube-api-access-8sfnr\") pod \"barbican-db-sync-9q2wd\" (UID: \"c3a753a7-88b4-4e0c-a1c6-82e79643c6b0\") " pod="openstack/barbican-db-sync-9q2wd" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.659661 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/884f8917-c720-4359-96d2-155b85958898-config\") pod \"dnsmasq-dns-56798b757f-lrbxr\" (UID: \"884f8917-c720-4359-96d2-155b85958898\") " pod="openstack/dnsmasq-dns-56798b757f-lrbxr" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.659712 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c3a753a7-88b4-4e0c-a1c6-82e79643c6b0-db-sync-config-data\") pod \"barbican-db-sync-9q2wd\" (UID: \"c3a753a7-88b4-4e0c-a1c6-82e79643c6b0\") " pod="openstack/barbican-db-sync-9q2wd" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.659767 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/884f8917-c720-4359-96d2-155b85958898-dns-svc\") pod \"dnsmasq-dns-56798b757f-lrbxr\" (UID: \"884f8917-c720-4359-96d2-155b85958898\") " pod="openstack/dnsmasq-dns-56798b757f-lrbxr" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.659829 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/884f8917-c720-4359-96d2-155b85958898-ovsdbserver-nb\") pod \"dnsmasq-dns-56798b757f-lrbxr\" (UID: \"884f8917-c720-4359-96d2-155b85958898\") " pod="openstack/dnsmasq-dns-56798b757f-lrbxr" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.659881 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: 
\"kubernetes.io/secret/c3a753a7-88b4-4e0c-a1c6-82e79643c6b0-combined-ca-bundle\") pod \"barbican-db-sync-9q2wd\" (UID: \"c3a753a7-88b4-4e0c-a1c6-82e79643c6b0\") " pod="openstack/barbican-db-sync-9q2wd" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.659905 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/884f8917-c720-4359-96d2-155b85958898-ovsdbserver-sb\") pod \"dnsmasq-dns-56798b757f-lrbxr\" (UID: \"884f8917-c720-4359-96d2-155b85958898\") " pod="openstack/dnsmasq-dns-56798b757f-lrbxr" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.769032 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-dd8dcd4b5-hjh8n" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.770315 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wvkfq\" (UniqueName: \"kubernetes.io/projected/9d84b827-da99-45f3-a7ac-a031bc3df2eb-kube-api-access-wvkfq\") pod \"glance-default-external-api-0\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " pod="openstack/glance-default-external-api-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.770340 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d84b827-da99-45f3-a7ac-a031bc3df2eb-scripts\") pod \"glance-default-external-api-0\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " pod="openstack/glance-default-external-api-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.770361 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " pod="openstack/glance-default-external-api-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.770398 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5c2pv\" (UniqueName: \"kubernetes.io/projected/884f8917-c720-4359-96d2-155b85958898-kube-api-access-5c2pv\") pod \"dnsmasq-dns-56798b757f-lrbxr\" (UID: \"884f8917-c720-4359-96d2-155b85958898\") " pod="openstack/dnsmasq-dns-56798b757f-lrbxr" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.770425 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d84b827-da99-45f3-a7ac-a031bc3df2eb-logs\") pod \"glance-default-external-api-0\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " pod="openstack/glance-default-external-api-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.770454 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8sfnr\" (UniqueName: \"kubernetes.io/projected/c3a753a7-88b4-4e0c-a1c6-82e79643c6b0-kube-api-access-8sfnr\") pod \"barbican-db-sync-9q2wd\" (UID: \"c3a753a7-88b4-4e0c-a1c6-82e79643c6b0\") " pod="openstack/barbican-db-sync-9q2wd" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.770472 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/884f8917-c720-4359-96d2-155b85958898-config\") pod \"dnsmasq-dns-56798b757f-lrbxr\" (UID: \"884f8917-c720-4359-96d2-155b85958898\") " pod="openstack/dnsmasq-dns-56798b757f-lrbxr" Dec 
05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.770496 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c3a753a7-88b4-4e0c-a1c6-82e79643c6b0-db-sync-config-data\") pod \"barbican-db-sync-9q2wd\" (UID: \"c3a753a7-88b4-4e0c-a1c6-82e79643c6b0\") " pod="openstack/barbican-db-sync-9q2wd" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.770547 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9d84b827-da99-45f3-a7ac-a031bc3df2eb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " pod="openstack/glance-default-external-api-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.770565 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/884f8917-c720-4359-96d2-155b85958898-dns-svc\") pod \"dnsmasq-dns-56798b757f-lrbxr\" (UID: \"884f8917-c720-4359-96d2-155b85958898\") " pod="openstack/dnsmasq-dns-56798b757f-lrbxr" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.770582 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/884f8917-c720-4359-96d2-155b85958898-ovsdbserver-nb\") pod \"dnsmasq-dns-56798b757f-lrbxr\" (UID: \"884f8917-c720-4359-96d2-155b85958898\") " pod="openstack/dnsmasq-dns-56798b757f-lrbxr" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.770613 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3a753a7-88b4-4e0c-a1c6-82e79643c6b0-combined-ca-bundle\") pod \"barbican-db-sync-9q2wd\" (UID: \"c3a753a7-88b4-4e0c-a1c6-82e79643c6b0\") " pod="openstack/barbican-db-sync-9q2wd" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.770629 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d84b827-da99-45f3-a7ac-a031bc3df2eb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " pod="openstack/glance-default-external-api-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.770647 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/884f8917-c720-4359-96d2-155b85958898-ovsdbserver-sb\") pod \"dnsmasq-dns-56798b757f-lrbxr\" (UID: \"884f8917-c720-4359-96d2-155b85958898\") " pod="openstack/dnsmasq-dns-56798b757f-lrbxr" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.770690 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d84b827-da99-45f3-a7ac-a031bc3df2eb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " pod="openstack/glance-default-external-api-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.770709 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d84b827-da99-45f3-a7ac-a031bc3df2eb-config-data\") pod \"glance-default-external-api-0\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " pod="openstack/glance-default-external-api-0" Dec 05 
17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.772810 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/884f8917-c720-4359-96d2-155b85958898-config\") pod \"dnsmasq-dns-56798b757f-lrbxr\" (UID: \"884f8917-c720-4359-96d2-155b85958898\") " pod="openstack/dnsmasq-dns-56798b757f-lrbxr" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.773105 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/884f8917-c720-4359-96d2-155b85958898-ovsdbserver-nb\") pod \"dnsmasq-dns-56798b757f-lrbxr\" (UID: \"884f8917-c720-4359-96d2-155b85958898\") " pod="openstack/dnsmasq-dns-56798b757f-lrbxr" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.773410 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/884f8917-c720-4359-96d2-155b85958898-ovsdbserver-sb\") pod \"dnsmasq-dns-56798b757f-lrbxr\" (UID: \"884f8917-c720-4359-96d2-155b85958898\") " pod="openstack/dnsmasq-dns-56798b757f-lrbxr" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.773865 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/884f8917-c720-4359-96d2-155b85958898-dns-svc\") pod \"dnsmasq-dns-56798b757f-lrbxr\" (UID: \"884f8917-c720-4359-96d2-155b85958898\") " pod="openstack/dnsmasq-dns-56798b757f-lrbxr" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.777645 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c3a753a7-88b4-4e0c-a1c6-82e79643c6b0-db-sync-config-data\") pod \"barbican-db-sync-9q2wd\" (UID: \"c3a753a7-88b4-4e0c-a1c6-82e79643c6b0\") " pod="openstack/barbican-db-sync-9q2wd" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.787221 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3a753a7-88b4-4e0c-a1c6-82e79643c6b0-combined-ca-bundle\") pod \"barbican-db-sync-9q2wd\" (UID: \"c3a753a7-88b4-4e0c-a1c6-82e79643c6b0\") " pod="openstack/barbican-db-sync-9q2wd" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.804861 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8sfnr\" (UniqueName: \"kubernetes.io/projected/c3a753a7-88b4-4e0c-a1c6-82e79643c6b0-kube-api-access-8sfnr\") pod \"barbican-db-sync-9q2wd\" (UID: \"c3a753a7-88b4-4e0c-a1c6-82e79643c6b0\") " pod="openstack/barbican-db-sync-9q2wd" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.821171 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5c2pv\" (UniqueName: \"kubernetes.io/projected/884f8917-c720-4359-96d2-155b85958898-kube-api-access-5c2pv\") pod \"dnsmasq-dns-56798b757f-lrbxr\" (UID: \"884f8917-c720-4359-96d2-155b85958898\") " pod="openstack/dnsmasq-dns-56798b757f-lrbxr" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.872954 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9d84b827-da99-45f3-a7ac-a031bc3df2eb-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " pod="openstack/glance-default-external-api-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.873019 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d84b827-da99-45f3-a7ac-a031bc3df2eb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " pod="openstack/glance-default-external-api-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.873065 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d84b827-da99-45f3-a7ac-a031bc3df2eb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " pod="openstack/glance-default-external-api-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.873084 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d84b827-da99-45f3-a7ac-a031bc3df2eb-config-data\") pod \"glance-default-external-api-0\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " pod="openstack/glance-default-external-api-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.873106 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wvkfq\" (UniqueName: \"kubernetes.io/projected/9d84b827-da99-45f3-a7ac-a031bc3df2eb-kube-api-access-wvkfq\") pod \"glance-default-external-api-0\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " pod="openstack/glance-default-external-api-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.873124 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d84b827-da99-45f3-a7ac-a031bc3df2eb-scripts\") pod \"glance-default-external-api-0\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " pod="openstack/glance-default-external-api-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.873143 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " pod="openstack/glance-default-external-api-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.873201 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d84b827-da99-45f3-a7ac-a031bc3df2eb-logs\") pod \"glance-default-external-api-0\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " pod="openstack/glance-default-external-api-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.873693 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d84b827-da99-45f3-a7ac-a031bc3df2eb-logs\") pod \"glance-default-external-api-0\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " pod="openstack/glance-default-external-api-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.875045 4961 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.882862 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9d84b827-da99-45f3-a7ac-a031bc3df2eb-httpd-run\") pod 
\"glance-default-external-api-0\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " pod="openstack/glance-default-external-api-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.889681 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d84b827-da99-45f3-a7ac-a031bc3df2eb-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " pod="openstack/glance-default-external-api-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.892349 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d84b827-da99-45f3-a7ac-a031bc3df2eb-config-data\") pod \"glance-default-external-api-0\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " pod="openstack/glance-default-external-api-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.893221 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d84b827-da99-45f3-a7ac-a031bc3df2eb-scripts\") pod \"glance-default-external-api-0\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " pod="openstack/glance-default-external-api-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.893135 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d84b827-da99-45f3-a7ac-a031bc3df2eb-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " pod="openstack/glance-default-external-api-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.905361 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-9q2wd" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.959412 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wvkfq\" (UniqueName: \"kubernetes.io/projected/9d84b827-da99-45f3-a7ac-a031bc3df2eb-kube-api-access-wvkfq\") pod \"glance-default-external-api-0\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " pod="openstack/glance-default-external-api-0" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.961025 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56798b757f-lrbxr" Dec 05 17:50:55 crc kubenswrapper[4961]: I1205 17:50:55.978009 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " pod="openstack/glance-default-external-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.042973 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7d5679f497-nfk5t"] Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.103575 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.108338 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.112109 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.112334 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.116150 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.135147 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-rxkp9"] Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.159698 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-74dc88fc-2sqmx" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.287210 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.297559 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06c57cab-2995-4dd3-bd4a-9cee5adeb528-config\") pod \"06c57cab-2995-4dd3-bd4a-9cee5adeb528\" (UID: \"06c57cab-2995-4dd3-bd4a-9cee5adeb528\") " Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.297672 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s7l55\" (UniqueName: \"kubernetes.io/projected/06c57cab-2995-4dd3-bd4a-9cee5adeb528-kube-api-access-s7l55\") pod \"06c57cab-2995-4dd3-bd4a-9cee5adeb528\" (UID: \"06c57cab-2995-4dd3-bd4a-9cee5adeb528\") " Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.297733 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/06c57cab-2995-4dd3-bd4a-9cee5adeb528-ovsdbserver-nb\") pod \"06c57cab-2995-4dd3-bd4a-9cee5adeb528\" (UID: \"06c57cab-2995-4dd3-bd4a-9cee5adeb528\") " Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.297901 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/06c57cab-2995-4dd3-bd4a-9cee5adeb528-ovsdbserver-sb\") pod \"06c57cab-2995-4dd3-bd4a-9cee5adeb528\" (UID: \"06c57cab-2995-4dd3-bd4a-9cee5adeb528\") " Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.297939 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/06c57cab-2995-4dd3-bd4a-9cee5adeb528-dns-svc\") pod \"06c57cab-2995-4dd3-bd4a-9cee5adeb528\" (UID: \"06c57cab-2995-4dd3-bd4a-9cee5adeb528\") " Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.298208 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-logs\") pod \"glance-default-internal-api-0\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.298250 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-config-data\") pod 
\"glance-default-internal-api-0\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.298269 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.298301 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.298383 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.298411 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-scripts\") pod \"glance-default-internal-api-0\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.298436 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.298460 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7f7rj\" (UniqueName: \"kubernetes.io/projected/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-kube-api-access-7f7rj\") pod \"glance-default-internal-api-0\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.303694 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06c57cab-2995-4dd3-bd4a-9cee5adeb528-kube-api-access-s7l55" (OuterVolumeSpecName: "kube-api-access-s7l55") pod "06c57cab-2995-4dd3-bd4a-9cee5adeb528" (UID: "06c57cab-2995-4dd3-bd4a-9cee5adeb528"). InnerVolumeSpecName "kube-api-access-s7l55". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.379409 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06c57cab-2995-4dd3-bd4a-9cee5adeb528-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "06c57cab-2995-4dd3-bd4a-9cee5adeb528" (UID: "06c57cab-2995-4dd3-bd4a-9cee5adeb528"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.379443 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06c57cab-2995-4dd3-bd4a-9cee5adeb528-config" (OuterVolumeSpecName: "config") pod "06c57cab-2995-4dd3-bd4a-9cee5adeb528" (UID: "06c57cab-2995-4dd3-bd4a-9cee5adeb528"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.379506 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06c57cab-2995-4dd3-bd4a-9cee5adeb528-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "06c57cab-2995-4dd3-bd4a-9cee5adeb528" (UID: "06c57cab-2995-4dd3-bd4a-9cee5adeb528"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.379593 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/06c57cab-2995-4dd3-bd4a-9cee5adeb528-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "06c57cab-2995-4dd3-bd4a-9cee5adeb528" (UID: "06c57cab-2995-4dd3-bd4a-9cee5adeb528"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.391509 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-rxkp9" event={"ID":"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7","Type":"ContainerStarted","Data":"3ebdee1b230f891fff78a66a2d981526de36ad053376b0f1ecc5042d2d40549f"} Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.400232 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.400283 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-scripts\") pod \"glance-default-internal-api-0\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.400310 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.400335 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7f7rj\" (UniqueName: \"kubernetes.io/projected/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-kube-api-access-7f7rj\") pod \"glance-default-internal-api-0\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.400377 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-logs\") pod \"glance-default-internal-api-0\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " 
pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.400403 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-config-data\") pod \"glance-default-internal-api-0\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.400422 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.400446 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.400516 4961 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/06c57cab-2995-4dd3-bd4a-9cee5adeb528-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.400534 4961 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/06c57cab-2995-4dd3-bd4a-9cee5adeb528-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.400546 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/06c57cab-2995-4dd3-bd4a-9cee5adeb528-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.400556 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s7l55\" (UniqueName: \"kubernetes.io/projected/06c57cab-2995-4dd3-bd4a-9cee5adeb528-kube-api-access-s7l55\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.400567 4961 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/06c57cab-2995-4dd3-bd4a-9cee5adeb528-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.401824 4961 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.407382 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.408445 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.409814 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-logs\") pod \"glance-default-internal-api-0\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.412297 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-scripts\") pod \"glance-default-internal-api-0\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.417240 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-config-data\") pod \"glance-default-internal-api-0\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.426422 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-74dc88fc-2sqmx" event={"ID":"06c57cab-2995-4dd3-bd4a-9cee5adeb528","Type":"ContainerDied","Data":"c4ad4535e2fc88b75c0138516a939f67ab6937d08b62394b937f3563bf34d0a3"} Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.426475 4961 scope.go:117] "RemoveContainer" containerID="1a46393f02e3a85988c0e741e5fccab1ce0e88c97a7415a499ca6bccbb072e17" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.426626 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-74dc88fc-2sqmx" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.426851 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.435318 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d5679f497-nfk5t" event={"ID":"da25d01c-6513-43e2-aa19-5c380434494c","Type":"ContainerStarted","Data":"00f382407db00bba6a5cc9382e42662fbdc3f8a9159bec66ca82e452ef677476"} Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.437641 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7f7rj\" (UniqueName: \"kubernetes.io/projected/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-kube-api-access-7f7rj\") pod \"glance-default-internal-api-0\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.453167 4961 scope.go:117] "RemoveContainer" containerID="6cddd34349518ac198931cd6a455617115cc24a446be8b3fea08c1864a956f6a" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.464554 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.509196 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-74dc88fc-2sqmx"] Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.526879 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-74dc88fc-2sqmx"] Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.555522 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-db-sync-sd82p"] Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.586741 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-d5754c6c5-dtppz"] Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.601629 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-db-sync-pvww2"] Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.623192 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.716564 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-dd8dcd4b5-hjh8n"] Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.745577 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-db-sync-9q2wd"] Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.751185 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.759621 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-db-sync-k7c2f"] Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.774323 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-56798b757f-lrbxr"] Dec 05 17:50:56 crc kubenswrapper[4961]: I1205 17:50:56.879218 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06c57cab-2995-4dd3-bd4a-9cee5adeb528" path="/var/lib/kubelet/pods/06c57cab-2995-4dd3-bd4a-9cee5adeb528/volumes" Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.023928 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:50:57 crc kubenswrapper[4961]: W1205 17:50:57.031355 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9d84b827_da99_45f3_a7ac_a031bc3df2eb.slice/crio-c13c97ee82f0cd2245a6e73a1833ac5c2e4aaf5929ffa33205c94e8572c065c7 WatchSource:0}: Error finding container c13c97ee82f0cd2245a6e73a1833ac5c2e4aaf5929ffa33205c94e8572c065c7: Status 404 returned error can't find the container with id c13c97ee82f0cd2245a6e73a1833ac5c2e4aaf5929ffa33205c94e8572c065c7 Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.459287 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-9q2wd" event={"ID":"c3a753a7-88b4-4e0c-a1c6-82e79643c6b0","Type":"ContainerStarted","Data":"3e44a32dabac08d85e27e88dfffc20fb7fe3312f8764b7e8934127fb0b806bca"} Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.464100 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.466284 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-k7c2f" event={"ID":"125fc58a-f251-4b81-98e7-eca6a2c72a8e","Type":"ContainerStarted","Data":"6237669b0ed6aa8e48afe57f7036ae3cb987b77acac964c204ba3c82c32c376d"} Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.480813 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-dd8dcd4b5-hjh8n" event={"ID":"e769efa9-1494-4b54-ac9f-97ea6bd0c55d","Type":"ContainerStarted","Data":"d4b6a8e0f9972770f736007324fbbd85e532223ee165d01637c39903e6621ee3"} Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.483030 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c30d6edf-8519-48cf-bcb0-f35c08e19a8b","Type":"ContainerStarted","Data":"6574561f9d3bc45fcf61fda6ff0b2ae8099cd95ed1d2e56e72f8ddf334484290"} Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.492560 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-sd82p" event={"ID":"b58877bc-157d-4919-9418-e5b306dff028","Type":"ContainerStarted","Data":"b0f14d72e67dbe1c466aa0b1f46aac1c0805eac023e62263017206534e3d1bbe"} Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.492601 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-sd82p" event={"ID":"b58877bc-157d-4919-9418-e5b306dff028","Type":"ContainerStarted","Data":"97ecb6ac6554d8f7c40eac4bcb6d7ea6ea9d3dbe548277eeb056e24fc68245d5"} Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.531143 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-db-sync-sd82p" 
podStartSLOduration=3.5311247679999997 podStartE2EDuration="3.531124768s" podCreationTimestamp="2025-12-05 17:50:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:50:57.527167681 +0000 UTC m=+1063.588318164" watchObservedRunningTime="2025-12-05 17:50:57.531124768 +0000 UTC m=+1063.592275241" Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.579228 4961 generic.go:334] "Generic (PLEG): container finished" podID="da25d01c-6513-43e2-aa19-5c380434494c" containerID="3b39c9409226a07ec32e084943be979384a6842a19cf6e153939ec485cf76ab0" exitCode=0 Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.579347 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d5679f497-nfk5t" event={"ID":"da25d01c-6513-43e2-aa19-5c380434494c","Type":"ContainerDied","Data":"3b39c9409226a07ec32e084943be979384a6842a19cf6e153939ec485cf76ab0"} Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.627466 4961 generic.go:334] "Generic (PLEG): container finished" podID="884f8917-c720-4359-96d2-155b85958898" containerID="e0a8d9d02a351869eb72ad4349033b0b259564f9845d083d46b1cd4deb7271f2" exitCode=0 Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.627548 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56798b757f-lrbxr" event={"ID":"884f8917-c720-4359-96d2-155b85958898","Type":"ContainerDied","Data":"e0a8d9d02a351869eb72ad4349033b0b259564f9845d083d46b1cd4deb7271f2"} Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.627599 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56798b757f-lrbxr" event={"ID":"884f8917-c720-4359-96d2-155b85958898","Type":"ContainerStarted","Data":"e287c5b40a6b0e6bd18f285ad3ab0d4c018bc142612edee8bd61823be4dc3925"} Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.630137 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-d5754c6c5-dtppz"] Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.634054 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-d5754c6c5-dtppz" event={"ID":"1004621a-ebc0-4b35-9a52-07089f68e790","Type":"ContainerStarted","Data":"1b35e18a3081f505597f807e34a357ce278d88a2406b9a53b2fe2ca0b4abbbd1"} Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.636546 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-rxkp9" event={"ID":"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7","Type":"ContainerStarted","Data":"04c35517b40d2cfc69e880c0d5a758e9902cf4718a8346412d5b30149498f7db"} Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.649111 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.653218 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-7cd9f578cf-pwnpz"] Dec 05 17:50:57 crc kubenswrapper[4961]: E1205 17:50:57.654147 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06c57cab-2995-4dd3-bd4a-9cee5adeb528" containerName="init" Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.654168 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="06c57cab-2995-4dd3-bd4a-9cee5adeb528" containerName="init" Dec 05 17:50:57 crc kubenswrapper[4961]: E1205 17:50:57.654214 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06c57cab-2995-4dd3-bd4a-9cee5adeb528" containerName="dnsmasq-dns" Dec 05 17:50:57 crc 
kubenswrapper[4961]: I1205 17:50:57.654222 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="06c57cab-2995-4dd3-bd4a-9cee5adeb528" containerName="dnsmasq-dns" Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.654660 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="06c57cab-2995-4dd3-bd4a-9cee5adeb528" containerName="dnsmasq-dns" Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.656859 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7cd9f578cf-pwnpz" Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.672978 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-pvww2" event={"ID":"62cd878c-721b-46b4-87bb-1573a9fcf6d9","Type":"ContainerStarted","Data":"bef5a36b52934ba69bb40de54cdeab785fdb161760596ebae67ad96c35d76968"} Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.746047 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9d84b827-da99-45f3-a7ac-a031bc3df2eb","Type":"ContainerStarted","Data":"c13c97ee82f0cd2245a6e73a1833ac5c2e4aaf5929ffa33205c94e8572c065c7"} Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.778433 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/61ae97fc-6378-4d23-a37b-4485f846fcbf-scripts\") pod \"horizon-7cd9f578cf-pwnpz\" (UID: \"61ae97fc-6378-4d23-a37b-4485f846fcbf\") " pod="openstack/horizon-7cd9f578cf-pwnpz" Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.778477 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwwp2\" (UniqueName: \"kubernetes.io/projected/61ae97fc-6378-4d23-a37b-4485f846fcbf-kube-api-access-vwwp2\") pod \"horizon-7cd9f578cf-pwnpz\" (UID: \"61ae97fc-6378-4d23-a37b-4485f846fcbf\") " pod="openstack/horizon-7cd9f578cf-pwnpz" Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.778573 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/61ae97fc-6378-4d23-a37b-4485f846fcbf-config-data\") pod \"horizon-7cd9f578cf-pwnpz\" (UID: \"61ae97fc-6378-4d23-a37b-4485f846fcbf\") " pod="openstack/horizon-7cd9f578cf-pwnpz" Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.778676 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/61ae97fc-6378-4d23-a37b-4485f846fcbf-horizon-secret-key\") pod \"horizon-7cd9f578cf-pwnpz\" (UID: \"61ae97fc-6378-4d23-a37b-4485f846fcbf\") " pod="openstack/horizon-7cd9f578cf-pwnpz" Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.778714 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/61ae97fc-6378-4d23-a37b-4485f846fcbf-logs\") pod \"horizon-7cd9f578cf-pwnpz\" (UID: \"61ae97fc-6378-4d23-a37b-4485f846fcbf\") " pod="openstack/horizon-7cd9f578cf-pwnpz" Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.793820 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7cd9f578cf-pwnpz"] Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.849604 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.882537 4961 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/61ae97fc-6378-4d23-a37b-4485f846fcbf-logs\") pod \"horizon-7cd9f578cf-pwnpz\" (UID: \"61ae97fc-6378-4d23-a37b-4485f846fcbf\") " pod="openstack/horizon-7cd9f578cf-pwnpz" Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.882618 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/61ae97fc-6378-4d23-a37b-4485f846fcbf-scripts\") pod \"horizon-7cd9f578cf-pwnpz\" (UID: \"61ae97fc-6378-4d23-a37b-4485f846fcbf\") " pod="openstack/horizon-7cd9f578cf-pwnpz" Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.882640 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwwp2\" (UniqueName: \"kubernetes.io/projected/61ae97fc-6378-4d23-a37b-4485f846fcbf-kube-api-access-vwwp2\") pod \"horizon-7cd9f578cf-pwnpz\" (UID: \"61ae97fc-6378-4d23-a37b-4485f846fcbf\") " pod="openstack/horizon-7cd9f578cf-pwnpz" Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.882701 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/61ae97fc-6378-4d23-a37b-4485f846fcbf-config-data\") pod \"horizon-7cd9f578cf-pwnpz\" (UID: \"61ae97fc-6378-4d23-a37b-4485f846fcbf\") " pod="openstack/horizon-7cd9f578cf-pwnpz" Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.882762 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/61ae97fc-6378-4d23-a37b-4485f846fcbf-horizon-secret-key\") pod \"horizon-7cd9f578cf-pwnpz\" (UID: \"61ae97fc-6378-4d23-a37b-4485f846fcbf\") " pod="openstack/horizon-7cd9f578cf-pwnpz" Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.884500 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/61ae97fc-6378-4d23-a37b-4485f846fcbf-scripts\") pod \"horizon-7cd9f578cf-pwnpz\" (UID: \"61ae97fc-6378-4d23-a37b-4485f846fcbf\") " pod="openstack/horizon-7cd9f578cf-pwnpz" Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.884811 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/61ae97fc-6378-4d23-a37b-4485f846fcbf-logs\") pod \"horizon-7cd9f578cf-pwnpz\" (UID: \"61ae97fc-6378-4d23-a37b-4485f846fcbf\") " pod="openstack/horizon-7cd9f578cf-pwnpz" Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.886213 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/61ae97fc-6378-4d23-a37b-4485f846fcbf-config-data\") pod \"horizon-7cd9f578cf-pwnpz\" (UID: \"61ae97fc-6378-4d23-a37b-4485f846fcbf\") " pod="openstack/horizon-7cd9f578cf-pwnpz" Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.903761 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/61ae97fc-6378-4d23-a37b-4485f846fcbf-horizon-secret-key\") pod \"horizon-7cd9f578cf-pwnpz\" (UID: \"61ae97fc-6378-4d23-a37b-4485f846fcbf\") " pod="openstack/horizon-7cd9f578cf-pwnpz" Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.926071 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwwp2\" (UniqueName: \"kubernetes.io/projected/61ae97fc-6378-4d23-a37b-4485f846fcbf-kube-api-access-vwwp2\") pod \"horizon-7cd9f578cf-pwnpz\" (UID: 
\"61ae97fc-6378-4d23-a37b-4485f846fcbf\") " pod="openstack/horizon-7cd9f578cf-pwnpz" Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.926314 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.978616 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7cd9f578cf-pwnpz" Dec 05 17:50:57 crc kubenswrapper[4961]: I1205 17:50:57.987366 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-rxkp9" podStartSLOduration=3.987344725 podStartE2EDuration="3.987344725s" podCreationTimestamp="2025-12-05 17:50:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:50:57.87237418 +0000 UTC m=+1063.933524643" watchObservedRunningTime="2025-12-05 17:50:57.987344725 +0000 UTC m=+1064.048495198" Dec 05 17:50:58 crc kubenswrapper[4961]: I1205 17:50:58.393636 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-7d5679f497-nfk5t" Dec 05 17:50:58 crc kubenswrapper[4961]: I1205 17:50:58.513816 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/da25d01c-6513-43e2-aa19-5c380434494c-ovsdbserver-sb\") pod \"da25d01c-6513-43e2-aa19-5c380434494c\" (UID: \"da25d01c-6513-43e2-aa19-5c380434494c\") " Dec 05 17:50:58 crc kubenswrapper[4961]: I1205 17:50:58.514585 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jcpbj\" (UniqueName: \"kubernetes.io/projected/da25d01c-6513-43e2-aa19-5c380434494c-kube-api-access-jcpbj\") pod \"da25d01c-6513-43e2-aa19-5c380434494c\" (UID: \"da25d01c-6513-43e2-aa19-5c380434494c\") " Dec 05 17:50:58 crc kubenswrapper[4961]: I1205 17:50:58.514634 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da25d01c-6513-43e2-aa19-5c380434494c-config\") pod \"da25d01c-6513-43e2-aa19-5c380434494c\" (UID: \"da25d01c-6513-43e2-aa19-5c380434494c\") " Dec 05 17:50:58 crc kubenswrapper[4961]: I1205 17:50:58.514667 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/da25d01c-6513-43e2-aa19-5c380434494c-ovsdbserver-nb\") pod \"da25d01c-6513-43e2-aa19-5c380434494c\" (UID: \"da25d01c-6513-43e2-aa19-5c380434494c\") " Dec 05 17:50:58 crc kubenswrapper[4961]: I1205 17:50:58.514754 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/da25d01c-6513-43e2-aa19-5c380434494c-dns-svc\") pod \"da25d01c-6513-43e2-aa19-5c380434494c\" (UID: \"da25d01c-6513-43e2-aa19-5c380434494c\") " Dec 05 17:50:58 crc kubenswrapper[4961]: I1205 17:50:58.527809 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/da25d01c-6513-43e2-aa19-5c380434494c-kube-api-access-jcpbj" (OuterVolumeSpecName: "kube-api-access-jcpbj") pod "da25d01c-6513-43e2-aa19-5c380434494c" (UID: "da25d01c-6513-43e2-aa19-5c380434494c"). InnerVolumeSpecName "kube-api-access-jcpbj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:50:58 crc kubenswrapper[4961]: I1205 17:50:58.545218 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da25d01c-6513-43e2-aa19-5c380434494c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "da25d01c-6513-43e2-aa19-5c380434494c" (UID: "da25d01c-6513-43e2-aa19-5c380434494c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:50:58 crc kubenswrapper[4961]: I1205 17:50:58.556051 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da25d01c-6513-43e2-aa19-5c380434494c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "da25d01c-6513-43e2-aa19-5c380434494c" (UID: "da25d01c-6513-43e2-aa19-5c380434494c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:50:58 crc kubenswrapper[4961]: I1205 17:50:58.563067 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da25d01c-6513-43e2-aa19-5c380434494c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "da25d01c-6513-43e2-aa19-5c380434494c" (UID: "da25d01c-6513-43e2-aa19-5c380434494c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:50:58 crc kubenswrapper[4961]: I1205 17:50:58.564478 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/da25d01c-6513-43e2-aa19-5c380434494c-config" (OuterVolumeSpecName: "config") pod "da25d01c-6513-43e2-aa19-5c380434494c" (UID: "da25d01c-6513-43e2-aa19-5c380434494c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:50:58 crc kubenswrapper[4961]: I1205 17:50:58.619388 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/da25d01c-6513-43e2-aa19-5c380434494c-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:58 crc kubenswrapper[4961]: I1205 17:50:58.619505 4961 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/da25d01c-6513-43e2-aa19-5c380434494c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:58 crc kubenswrapper[4961]: I1205 17:50:58.619519 4961 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/da25d01c-6513-43e2-aa19-5c380434494c-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:58 crc kubenswrapper[4961]: I1205 17:50:58.619527 4961 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/da25d01c-6513-43e2-aa19-5c380434494c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:58 crc kubenswrapper[4961]: I1205 17:50:58.619538 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jcpbj\" (UniqueName: \"kubernetes.io/projected/da25d01c-6513-43e2-aa19-5c380434494c-kube-api-access-jcpbj\") on node \"crc\" DevicePath \"\"" Dec 05 17:50:58 crc kubenswrapper[4961]: I1205 17:50:58.769380 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25","Type":"ContainerStarted","Data":"1bb74fbde27ef5a0920f4b7a127914ce7e0c4994ecd9b54db500db90662365cc"} Dec 05 17:50:58 crc kubenswrapper[4961]: I1205 17:50:58.773011 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-7d5679f497-nfk5t" Dec 05 17:50:58 crc kubenswrapper[4961]: I1205 17:50:58.773038 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-7d5679f497-nfk5t" event={"ID":"da25d01c-6513-43e2-aa19-5c380434494c","Type":"ContainerDied","Data":"00f382407db00bba6a5cc9382e42662fbdc3f8a9159bec66ca82e452ef677476"} Dec 05 17:50:58 crc kubenswrapper[4961]: I1205 17:50:58.773090 4961 scope.go:117] "RemoveContainer" containerID="3b39c9409226a07ec32e084943be979384a6842a19cf6e153939ec485cf76ab0" Dec 05 17:50:58 crc kubenswrapper[4961]: I1205 17:50:58.914582 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-7cd9f578cf-pwnpz"] Dec 05 17:50:58 crc kubenswrapper[4961]: I1205 17:50:58.935951 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-7d5679f497-nfk5t"] Dec 05 17:50:58 crc kubenswrapper[4961]: I1205 17:50:58.952852 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-7d5679f497-nfk5t"] Dec 05 17:50:59 crc kubenswrapper[4961]: W1205 17:50:59.602862 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod61ae97fc_6378_4d23_a37b_4485f846fcbf.slice/crio-dd4585e36349d72617e9bf98c3c5dc2d3b359ddd8e5534c5866001da24571842 WatchSource:0}: Error finding container dd4585e36349d72617e9bf98c3c5dc2d3b359ddd8e5534c5866001da24571842: Status 404 returned error can't find the container with id dd4585e36349d72617e9bf98c3c5dc2d3b359ddd8e5534c5866001da24571842 Dec 05 17:50:59 crc kubenswrapper[4961]: I1205 17:50:59.798717 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25","Type":"ContainerStarted","Data":"6ebf534a403746544693b78bc0c1aef119bc38720cc036711d8a59615aa73bb2"} Dec 05 17:50:59 crc kubenswrapper[4961]: I1205 17:50:59.803625 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56798b757f-lrbxr" event={"ID":"884f8917-c720-4359-96d2-155b85958898","Type":"ContainerStarted","Data":"d7f2b29c64bea537f4a7582ab2c06a54a6b8f245a050c48659ebc62b2e53b237"} Dec 05 17:50:59 crc kubenswrapper[4961]: I1205 17:50:59.803835 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-56798b757f-lrbxr" Dec 05 17:50:59 crc kubenswrapper[4961]: I1205 17:50:59.805099 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7cd9f578cf-pwnpz" event={"ID":"61ae97fc-6378-4d23-a37b-4485f846fcbf","Type":"ContainerStarted","Data":"dd4585e36349d72617e9bf98c3c5dc2d3b359ddd8e5534c5866001da24571842"} Dec 05 17:51:00 crc kubenswrapper[4961]: I1205 17:51:00.817162 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9d84b827-da99-45f3-a7ac-a031bc3df2eb","Type":"ContainerStarted","Data":"7a6370ffbe96690414a765ad639522c2b2ea3c91b0fad125defc88fcbcd5fdc0"} Dec 05 17:51:00 crc kubenswrapper[4961]: I1205 17:51:00.820972 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25" containerName="glance-log" containerID="cri-o://6ebf534a403746544693b78bc0c1aef119bc38720cc036711d8a59615aa73bb2" gracePeriod=30 Dec 05 17:51:00 crc kubenswrapper[4961]: I1205 17:51:00.821183 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" 
event={"ID":"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25","Type":"ContainerStarted","Data":"128117abee0a4f964bdbdd1ac159790ffab4f607c44c277d18e09c8166f9e915"} Dec 05 17:51:00 crc kubenswrapper[4961]: I1205 17:51:00.821302 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25" containerName="glance-httpd" containerID="cri-o://128117abee0a4f964bdbdd1ac159790ffab4f607c44c277d18e09c8166f9e915" gracePeriod=30 Dec 05 17:51:00 crc kubenswrapper[4961]: I1205 17:51:00.876891 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-56798b757f-lrbxr" podStartSLOduration=5.8768752509999995 podStartE2EDuration="5.876875251s" podCreationTimestamp="2025-12-05 17:50:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:50:59.825140717 +0000 UTC m=+1065.886291190" watchObservedRunningTime="2025-12-05 17:51:00.876875251 +0000 UTC m=+1066.938025724" Dec 05 17:51:00 crc kubenswrapper[4961]: I1205 17:51:00.878179 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="da25d01c-6513-43e2-aa19-5c380434494c" path="/var/lib/kubelet/pods/da25d01c-6513-43e2-aa19-5c380434494c/volumes" Dec 05 17:51:00 crc kubenswrapper[4961]: I1205 17:51:00.881647 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=6.881638188 podStartE2EDuration="6.881638188s" podCreationTimestamp="2025-12-05 17:50:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:51:00.878287485 +0000 UTC m=+1066.939437988" watchObservedRunningTime="2025-12-05 17:51:00.881638188 +0000 UTC m=+1066.942788661" Dec 05 17:51:00 crc kubenswrapper[4961]: I1205 17:51:00.884037 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-etc-swift\") pod \"swift-storage-0\" (UID: \"e533098a-ca28-487e-8471-7a426defda37\") " pod="openstack/swift-storage-0" Dec 05 17:51:00 crc kubenswrapper[4961]: I1205 17:51:00.898041 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/e533098a-ca28-487e-8471-7a426defda37-etc-swift\") pod \"swift-storage-0\" (UID: \"e533098a-ca28-487e-8471-7a426defda37\") " pod="openstack/swift-storage-0" Dec 05 17:51:00 crc kubenswrapper[4961]: I1205 17:51:00.914250 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-storage-0" Dec 05 17:51:01 crc kubenswrapper[4961]: I1205 17:51:01.837133 4961 generic.go:334] "Generic (PLEG): container finished" podID="664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25" containerID="128117abee0a4f964bdbdd1ac159790ffab4f607c44c277d18e09c8166f9e915" exitCode=0 Dec 05 17:51:01 crc kubenswrapper[4961]: I1205 17:51:01.837479 4961 generic.go:334] "Generic (PLEG): container finished" podID="664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25" containerID="6ebf534a403746544693b78bc0c1aef119bc38720cc036711d8a59615aa73bb2" exitCode=143 Dec 05 17:51:01 crc kubenswrapper[4961]: I1205 17:51:01.837223 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25","Type":"ContainerDied","Data":"128117abee0a4f964bdbdd1ac159790ffab4f607c44c277d18e09c8166f9e915"} Dec 05 17:51:01 crc kubenswrapper[4961]: I1205 17:51:01.837603 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25","Type":"ContainerDied","Data":"6ebf534a403746544693b78bc0c1aef119bc38720cc036711d8a59615aa73bb2"} Dec 05 17:51:01 crc kubenswrapper[4961]: I1205 17:51:01.839548 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9d84b827-da99-45f3-a7ac-a031bc3df2eb","Type":"ContainerStarted","Data":"88632bc8bd867c19ecb86f583f7ec9fb4cf8560e30947277a734328230f570ae"} Dec 05 17:51:01 crc kubenswrapper[4961]: I1205 17:51:01.839688 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="9d84b827-da99-45f3-a7ac-a031bc3df2eb" containerName="glance-log" containerID="cri-o://7a6370ffbe96690414a765ad639522c2b2ea3c91b0fad125defc88fcbcd5fdc0" gracePeriod=30 Dec 05 17:51:01 crc kubenswrapper[4961]: I1205 17:51:01.839928 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="9d84b827-da99-45f3-a7ac-a031bc3df2eb" containerName="glance-httpd" containerID="cri-o://88632bc8bd867c19ecb86f583f7ec9fb4cf8560e30947277a734328230f570ae" gracePeriod=30 Dec 05 17:51:01 crc kubenswrapper[4961]: I1205 17:51:01.872083 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=6.872055476 podStartE2EDuration="6.872055476s" podCreationTimestamp="2025-12-05 17:50:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:51:01.867636568 +0000 UTC m=+1067.928787061" watchObservedRunningTime="2025-12-05 17:51:01.872055476 +0000 UTC m=+1067.933205949" Dec 05 17:51:02 crc kubenswrapper[4961]: I1205 17:51:02.851397 4961 generic.go:334] "Generic (PLEG): container finished" podID="9e3558d5-7e3a-4848-8f4c-509a5f19a8c7" containerID="04c35517b40d2cfc69e880c0d5a758e9902cf4718a8346412d5b30149498f7db" exitCode=0 Dec 05 17:51:02 crc kubenswrapper[4961]: I1205 17:51:02.851475 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-rxkp9" event={"ID":"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7","Type":"ContainerDied","Data":"04c35517b40d2cfc69e880c0d5a758e9902cf4718a8346412d5b30149498f7db"} Dec 05 17:51:02 crc kubenswrapper[4961]: I1205 17:51:02.856281 4961 generic.go:334] "Generic (PLEG): container finished" podID="9d84b827-da99-45f3-a7ac-a031bc3df2eb" 
containerID="88632bc8bd867c19ecb86f583f7ec9fb4cf8560e30947277a734328230f570ae" exitCode=0 Dec 05 17:51:02 crc kubenswrapper[4961]: I1205 17:51:02.856304 4961 generic.go:334] "Generic (PLEG): container finished" podID="9d84b827-da99-45f3-a7ac-a031bc3df2eb" containerID="7a6370ffbe96690414a765ad639522c2b2ea3c91b0fad125defc88fcbcd5fdc0" exitCode=143 Dec 05 17:51:02 crc kubenswrapper[4961]: I1205 17:51:02.856321 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9d84b827-da99-45f3-a7ac-a031bc3df2eb","Type":"ContainerDied","Data":"88632bc8bd867c19ecb86f583f7ec9fb4cf8560e30947277a734328230f570ae"} Dec 05 17:51:02 crc kubenswrapper[4961]: I1205 17:51:02.856341 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9d84b827-da99-45f3-a7ac-a031bc3df2eb","Type":"ContainerDied","Data":"7a6370ffbe96690414a765ad639522c2b2ea3c91b0fad125defc88fcbcd5fdc0"} Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:04.748746 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-dd8dcd4b5-hjh8n"] Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:04.782921 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-65bb59d746-cqlw9"] Dec 05 17:51:06 crc kubenswrapper[4961]: E1205 17:51:04.783648 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="da25d01c-6513-43e2-aa19-5c380434494c" containerName="init" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:04.783664 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="da25d01c-6513-43e2-aa19-5c380434494c" containerName="init" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:04.783880 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="da25d01c-6513-43e2-aa19-5c380434494c" containerName="init" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:04.785855 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:04.789886 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-horizon-svc" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:04.806683 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-65bb59d746-cqlw9"] Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:04.850270 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7cd9f578cf-pwnpz"] Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:04.888224 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/horizon-78bb69647d-95ptt"] Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:04.889764 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-78bb69647d-95ptt" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:04.890302 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-78bb69647d-95ptt"] Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:04.901210 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01eef206-68f4-4923-8253-2a130ba0dca3-logs\") pod \"horizon-65bb59d746-cqlw9\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:04.902891 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01eef206-68f4-4923-8253-2a130ba0dca3-combined-ca-bundle\") pod \"horizon-65bb59d746-cqlw9\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:04.902975 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/01eef206-68f4-4923-8253-2a130ba0dca3-horizon-secret-key\") pod \"horizon-65bb59d746-cqlw9\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:04.903010 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/01eef206-68f4-4923-8253-2a130ba0dca3-scripts\") pod \"horizon-65bb59d746-cqlw9\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:04.903165 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/01eef206-68f4-4923-8253-2a130ba0dca3-horizon-tls-certs\") pod \"horizon-65bb59d746-cqlw9\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:04.903198 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/01eef206-68f4-4923-8253-2a130ba0dca3-config-data\") pod \"horizon-65bb59d746-cqlw9\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:04.903225 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lb54t\" (UniqueName: \"kubernetes.io/projected/01eef206-68f4-4923-8253-2a130ba0dca3-kube-api-access-lb54t\") pod \"horizon-65bb59d746-cqlw9\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.006721 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a3dcddde-25f9-446a-8d5f-d9468cfa6940-scripts\") pod \"horizon-78bb69647d-95ptt\" (UID: \"a3dcddde-25f9-446a-8d5f-d9468cfa6940\") " pod="openstack/horizon-78bb69647d-95ptt" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.007386 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3dcddde-25f9-446a-8d5f-d9468cfa6940-combined-ca-bundle\") pod \"horizon-78bb69647d-95ptt\" (UID: \"a3dcddde-25f9-446a-8d5f-d9468cfa6940\") " pod="openstack/horizon-78bb69647d-95ptt" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.007484 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/01eef206-68f4-4923-8253-2a130ba0dca3-horizon-tls-certs\") pod \"horizon-65bb59d746-cqlw9\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.007529 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/01eef206-68f4-4923-8253-2a130ba0dca3-config-data\") pod \"horizon-65bb59d746-cqlw9\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.010258 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/01eef206-68f4-4923-8253-2a130ba0dca3-config-data\") pod \"horizon-65bb59d746-cqlw9\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.010338 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lb54t\" (UniqueName: \"kubernetes.io/projected/01eef206-68f4-4923-8253-2a130ba0dca3-kube-api-access-lb54t\") pod \"horizon-65bb59d746-cqlw9\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.011227 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01eef206-68f4-4923-8253-2a130ba0dca3-logs\") pod \"horizon-65bb59d746-cqlw9\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.011281 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01eef206-68f4-4923-8253-2a130ba0dca3-combined-ca-bundle\") pod \"horizon-65bb59d746-cqlw9\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.011375 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3dcddde-25f9-446a-8d5f-d9468cfa6940-horizon-tls-certs\") pod \"horizon-78bb69647d-95ptt\" (UID: \"a3dcddde-25f9-446a-8d5f-d9468cfa6940\") " pod="openstack/horizon-78bb69647d-95ptt" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.011414 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a3dcddde-25f9-446a-8d5f-d9468cfa6940-config-data\") pod \"horizon-78bb69647d-95ptt\" (UID: \"a3dcddde-25f9-446a-8d5f-d9468cfa6940\") " pod="openstack/horizon-78bb69647d-95ptt" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.011442 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: 
\"kubernetes.io/secret/01eef206-68f4-4923-8253-2a130ba0dca3-horizon-secret-key\") pod \"horizon-65bb59d746-cqlw9\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.011466 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/01eef206-68f4-4923-8253-2a130ba0dca3-scripts\") pod \"horizon-65bb59d746-cqlw9\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.011493 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3dcddde-25f9-446a-8d5f-d9468cfa6940-logs\") pod \"horizon-78bb69647d-95ptt\" (UID: \"a3dcddde-25f9-446a-8d5f-d9468cfa6940\") " pod="openstack/horizon-78bb69647d-95ptt" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.011552 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kkw9h\" (UniqueName: \"kubernetes.io/projected/a3dcddde-25f9-446a-8d5f-d9468cfa6940-kube-api-access-kkw9h\") pod \"horizon-78bb69647d-95ptt\" (UID: \"a3dcddde-25f9-446a-8d5f-d9468cfa6940\") " pod="openstack/horizon-78bb69647d-95ptt" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.011615 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a3dcddde-25f9-446a-8d5f-d9468cfa6940-horizon-secret-key\") pod \"horizon-78bb69647d-95ptt\" (UID: \"a3dcddde-25f9-446a-8d5f-d9468cfa6940\") " pod="openstack/horizon-78bb69647d-95ptt" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.013073 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/01eef206-68f4-4923-8253-2a130ba0dca3-scripts\") pod \"horizon-65bb59d746-cqlw9\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.013382 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01eef206-68f4-4923-8253-2a130ba0dca3-logs\") pod \"horizon-65bb59d746-cqlw9\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.015040 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01eef206-68f4-4923-8253-2a130ba0dca3-combined-ca-bundle\") pod \"horizon-65bb59d746-cqlw9\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.017890 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/01eef206-68f4-4923-8253-2a130ba0dca3-horizon-secret-key\") pod \"horizon-65bb59d746-cqlw9\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.019163 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/01eef206-68f4-4923-8253-2a130ba0dca3-horizon-tls-certs\") pod \"horizon-65bb59d746-cqlw9\" (UID: 
\"01eef206-68f4-4923-8253-2a130ba0dca3\") " pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.029294 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lb54t\" (UniqueName: \"kubernetes.io/projected/01eef206-68f4-4923-8253-2a130ba0dca3-kube-api-access-lb54t\") pod \"horizon-65bb59d746-cqlw9\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.116079 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.116941 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3dcddde-25f9-446a-8d5f-d9468cfa6940-logs\") pod \"horizon-78bb69647d-95ptt\" (UID: \"a3dcddde-25f9-446a-8d5f-d9468cfa6940\") " pod="openstack/horizon-78bb69647d-95ptt" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.117046 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kkw9h\" (UniqueName: \"kubernetes.io/projected/a3dcddde-25f9-446a-8d5f-d9468cfa6940-kube-api-access-kkw9h\") pod \"horizon-78bb69647d-95ptt\" (UID: \"a3dcddde-25f9-446a-8d5f-d9468cfa6940\") " pod="openstack/horizon-78bb69647d-95ptt" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.117092 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a3dcddde-25f9-446a-8d5f-d9468cfa6940-horizon-secret-key\") pod \"horizon-78bb69647d-95ptt\" (UID: \"a3dcddde-25f9-446a-8d5f-d9468cfa6940\") " pod="openstack/horizon-78bb69647d-95ptt" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.117315 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3dcddde-25f9-446a-8d5f-d9468cfa6940-logs\") pod \"horizon-78bb69647d-95ptt\" (UID: \"a3dcddde-25f9-446a-8d5f-d9468cfa6940\") " pod="openstack/horizon-78bb69647d-95ptt" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.117415 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a3dcddde-25f9-446a-8d5f-d9468cfa6940-scripts\") pod \"horizon-78bb69647d-95ptt\" (UID: \"a3dcddde-25f9-446a-8d5f-d9468cfa6940\") " pod="openstack/horizon-78bb69647d-95ptt" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.117436 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3dcddde-25f9-446a-8d5f-d9468cfa6940-combined-ca-bundle\") pod \"horizon-78bb69647d-95ptt\" (UID: \"a3dcddde-25f9-446a-8d5f-d9468cfa6940\") " pod="openstack/horizon-78bb69647d-95ptt" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.117529 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3dcddde-25f9-446a-8d5f-d9468cfa6940-horizon-tls-certs\") pod \"horizon-78bb69647d-95ptt\" (UID: \"a3dcddde-25f9-446a-8d5f-d9468cfa6940\") " pod="openstack/horizon-78bb69647d-95ptt" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.117548 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a3dcddde-25f9-446a-8d5f-d9468cfa6940-config-data\") pod 
\"horizon-78bb69647d-95ptt\" (UID: \"a3dcddde-25f9-446a-8d5f-d9468cfa6940\") " pod="openstack/horizon-78bb69647d-95ptt" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.118984 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/a3dcddde-25f9-446a-8d5f-d9468cfa6940-scripts\") pod \"horizon-78bb69647d-95ptt\" (UID: \"a3dcddde-25f9-446a-8d5f-d9468cfa6940\") " pod="openstack/horizon-78bb69647d-95ptt" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.119153 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/a3dcddde-25f9-446a-8d5f-d9468cfa6940-config-data\") pod \"horizon-78bb69647d-95ptt\" (UID: \"a3dcddde-25f9-446a-8d5f-d9468cfa6940\") " pod="openstack/horizon-78bb69647d-95ptt" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.120462 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/a3dcddde-25f9-446a-8d5f-d9468cfa6940-horizon-secret-key\") pod \"horizon-78bb69647d-95ptt\" (UID: \"a3dcddde-25f9-446a-8d5f-d9468cfa6940\") " pod="openstack/horizon-78bb69647d-95ptt" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.122883 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/a3dcddde-25f9-446a-8d5f-d9468cfa6940-horizon-tls-certs\") pod \"horizon-78bb69647d-95ptt\" (UID: \"a3dcddde-25f9-446a-8d5f-d9468cfa6940\") " pod="openstack/horizon-78bb69647d-95ptt" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.129006 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a3dcddde-25f9-446a-8d5f-d9468cfa6940-combined-ca-bundle\") pod \"horizon-78bb69647d-95ptt\" (UID: \"a3dcddde-25f9-446a-8d5f-d9468cfa6940\") " pod="openstack/horizon-78bb69647d-95ptt" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.133981 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kkw9h\" (UniqueName: \"kubernetes.io/projected/a3dcddde-25f9-446a-8d5f-d9468cfa6940-kube-api-access-kkw9h\") pod \"horizon-78bb69647d-95ptt\" (UID: \"a3dcddde-25f9-446a-8d5f-d9468cfa6940\") " pod="openstack/horizon-78bb69647d-95ptt" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.217424 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-78bb69647d-95ptt" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:05.966977 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-56798b757f-lrbxr" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:06.037655 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-t65n2"] Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:06.037924 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" podUID="27a3156a-6997-42ea-888b-ff4a4c2b1988" containerName="dnsmasq-dns" containerID="cri-o://761b369dfd98e3c652aab0588518ce5d190a38dcdcc629b8ef251ed12fbb3b31" gracePeriod=10 Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:06.242104 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" podUID="27a3156a-6997-42ea-888b-ff4a4c2b1988" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.119:5353: connect: connection refused" Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:06.915370 4961 generic.go:334] "Generic (PLEG): container finished" podID="27a3156a-6997-42ea-888b-ff4a4c2b1988" containerID="761b369dfd98e3c652aab0588518ce5d190a38dcdcc629b8ef251ed12fbb3b31" exitCode=0 Dec 05 17:51:06 crc kubenswrapper[4961]: I1205 17:51:06.915706 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" event={"ID":"27a3156a-6997-42ea-888b-ff4a4c2b1988","Type":"ContainerDied","Data":"761b369dfd98e3c652aab0588518ce5d190a38dcdcc629b8ef251ed12fbb3b31"} Dec 05 17:51:11 crc kubenswrapper[4961]: I1205 17:51:11.241649 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" podUID="27a3156a-6997-42ea-888b-ff4a4c2b1988" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.119:5353: connect: connection refused" Dec 05 17:51:12 crc kubenswrapper[4961]: E1205 17:51:12.619682 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 05 17:51:12 crc kubenswrapper[4961]: E1205 17:51:12.619969 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n6ch58h8chfhb4h57bh5dhcdh644h8fhbfh598h544h65fh56h5b5h55bh559h5bbh645h59h66hd7h4h57h79h589h89h56bh566h696h6q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-bf249,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-d5754c6c5-dtppz_openstack(1004621a-ebc0-4b35-9a52-07089f68e790): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:51:12 crc kubenswrapper[4961]: E1205 17:51:12.623414 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-d5754c6c5-dtppz" podUID="1004621a-ebc0-4b35-9a52-07089f68e790" Dec 05 17:51:12 crc kubenswrapper[4961]: E1205 17:51:12.629398 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 05 17:51:12 crc kubenswrapper[4961]: E1205 17:51:12.629560 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n59h57ch5fh5fhcbhdch64fh655h578h5f4h5d6h67dh66dh597h6h568h565h554h666h68bh544h5fdh687h65dh5f8h64chc9hcch5b7h5cdh689h669q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vwwp2,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-7cd9f578cf-pwnpz_openstack(61ae97fc-6378-4d23-a37b-4485f846fcbf): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:51:12 crc kubenswrapper[4961]: E1205 17:51:12.632559 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-7cd9f578cf-pwnpz" podUID="61ae97fc-6378-4d23-a37b-4485f846fcbf" Dec 05 17:51:14 crc kubenswrapper[4961]: E1205 17:51:14.232509 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-horizon:current-podified" Dec 05 17:51:14 crc kubenswrapper[4961]: E1205 17:51:14.234991 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:horizon-log,Image:quay.io/podified-antelope-centos9/openstack-horizon:current-podified,Command:[/bin/bash],Args:[-c tail -n+1 -F 
/var/log/horizon/horizon.log],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:n8hb4h5h584h7ch64dh5bchb6hb6h695h66ch564hb9h95h5c8h648h5dchdfhd4h559h559h699h58bh677h5c7h648h75h674h5b9h5ch5ffh547q,ValueFrom:nil,},EnvVar{Name:ENABLE_DESIGNATE,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_HEAT,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_IRONIC,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_MANILA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_OCTAVIA,Value:yes,ValueFrom:nil,},EnvVar{Name:ENABLE_WATCHER,Value:no,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},EnvVar{Name:UNPACK_THEME,Value:true,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:logs,ReadOnly:false,MountPath:/var/log/horizon,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-pzz4j,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*48,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*true,RunAsGroup:*42400,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod horizon-dd8dcd4b5-hjh8n_openstack(e769efa9-1494-4b54-ac9f-97ea6bd0c55d): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:51:14 crc kubenswrapper[4961]: E1205 17:51:14.237413 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="[failed to \"StartContainer\" for \"horizon-log\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\", failed to \"StartContainer\" for \"horizon\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-horizon:current-podified\\\"\"]" pod="openstack/horizon-dd8dcd4b5-hjh8n" podUID="e769efa9-1494-4b54-ac9f-97ea6bd0c55d" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.345414 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.351416 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-rxkp9" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.358857 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.392483 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-credential-keys\") pod \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\" (UID: \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\") " Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.392534 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-scripts\") pod \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\" (UID: \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\") " Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.392563 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.396144 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-scripts\") pod \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.396186 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-config-data\") pod \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.396254 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-combined-ca-bundle\") pod \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\" (UID: \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\") " Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.396280 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-internal-tls-certs\") pod \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.396315 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-logs\") pod \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.396340 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4xj7\" (UniqueName: \"kubernetes.io/projected/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-kube-api-access-s4xj7\") pod \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\" (UID: \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\") " Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.396373 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-config-data\") pod \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\" (UID: \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\") " Dec 05 17:51:14 crc 
kubenswrapper[4961]: I1205 17:51:14.396440 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-fernet-keys\") pod \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\" (UID: \"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7\") " Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.396462 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-combined-ca-bundle\") pod \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.396525 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-httpd-run\") pod \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.396566 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7f7rj\" (UniqueName: \"kubernetes.io/projected/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-kube-api-access-7f7rj\") pod \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\" (UID: \"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25\") " Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.405681 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-logs" (OuterVolumeSpecName: "logs") pod "664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25" (UID: "664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.407258 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25" (UID: "664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.418337 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-scripts" (OuterVolumeSpecName: "scripts") pod "664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25" (UID: "664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.418427 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-scripts" (OuterVolumeSpecName: "scripts") pod "9e3558d5-7e3a-4848-8f4c-509a5f19a8c7" (UID: "9e3558d5-7e3a-4848-8f4c-509a5f19a8c7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.418489 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-kube-api-access-7f7rj" (OuterVolumeSpecName: "kube-api-access-7f7rj") pod "664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25" (UID: "664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25"). InnerVolumeSpecName "kube-api-access-7f7rj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.418546 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-kube-api-access-s4xj7" (OuterVolumeSpecName: "kube-api-access-s4xj7") pod "9e3558d5-7e3a-4848-8f4c-509a5f19a8c7" (UID: "9e3558d5-7e3a-4848-8f4c-509a5f19a8c7"). InnerVolumeSpecName "kube-api-access-s4xj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.419519 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "9e3558d5-7e3a-4848-8f4c-509a5f19a8c7" (UID: "9e3558d5-7e3a-4848-8f4c-509a5f19a8c7"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.450448 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25" (UID: "664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.464249 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "9e3558d5-7e3a-4848-8f4c-509a5f19a8c7" (UID: "9e3558d5-7e3a-4848-8f4c-509a5f19a8c7"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.465591 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9e3558d5-7e3a-4848-8f4c-509a5f19a8c7" (UID: "9e3558d5-7e3a-4848-8f4c-509a5f19a8c7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.488990 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25" (UID: "664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.497990 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d84b827-da99-45f3-a7ac-a031bc3df2eb-public-tls-certs\") pod \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.498359 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d84b827-da99-45f3-a7ac-a031bc3df2eb-scripts\") pod \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.498395 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9d84b827-da99-45f3-a7ac-a031bc3df2eb-httpd-run\") pod \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.498452 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d84b827-da99-45f3-a7ac-a031bc3df2eb-combined-ca-bundle\") pod \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.498481 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d84b827-da99-45f3-a7ac-a031bc3df2eb-logs\") pod \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.498547 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wvkfq\" (UniqueName: \"kubernetes.io/projected/9d84b827-da99-45f3-a7ac-a031bc3df2eb-kube-api-access-wvkfq\") pod \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.498608 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d84b827-da99-45f3-a7ac-a031bc3df2eb-config-data\") pod \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.498633 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\" (UID: \"9d84b827-da99-45f3-a7ac-a031bc3df2eb\") " Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.499381 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d84b827-da99-45f3-a7ac-a031bc3df2eb-logs" (OuterVolumeSpecName: "logs") pod "9d84b827-da99-45f3-a7ac-a031bc3df2eb" (UID: "9d84b827-da99-45f3-a7ac-a031bc3df2eb"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.499699 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9d84b827-da99-45f3-a7ac-a031bc3df2eb-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "9d84b827-da99-45f3-a7ac-a031bc3df2eb" (UID: "9d84b827-da99-45f3-a7ac-a031bc3df2eb"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.499966 4961 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.499986 4961 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.500009 4961 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.500036 4961 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/9d84b827-da99-45f3-a7ac-a031bc3df2eb-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.500048 4961 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.500056 4961 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9d84b827-da99-45f3-a7ac-a031bc3df2eb-logs\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.500064 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.500074 4961 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-logs\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.500082 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4xj7\" (UniqueName: \"kubernetes.io/projected/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-kube-api-access-s4xj7\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.500093 4961 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.500101 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.500108 4961 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: 
\"kubernetes.io/empty-dir/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.500117 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7f7rj\" (UniqueName: \"kubernetes.io/projected/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-kube-api-access-7f7rj\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.511512 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d84b827-da99-45f3-a7ac-a031bc3df2eb-kube-api-access-wvkfq" (OuterVolumeSpecName: "kube-api-access-wvkfq") pod "9d84b827-da99-45f3-a7ac-a031bc3df2eb" (UID: "9d84b827-da99-45f3-a7ac-a031bc3df2eb"). InnerVolumeSpecName "kube-api-access-wvkfq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.511789 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-config-data" (OuterVolumeSpecName: "config-data") pod "9e3558d5-7e3a-4848-8f4c-509a5f19a8c7" (UID: "9e3558d5-7e3a-4848-8f4c-509a5f19a8c7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.515217 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "9d84b827-da99-45f3-a7ac-a031bc3df2eb" (UID: "9d84b827-da99-45f3-a7ac-a031bc3df2eb"). InnerVolumeSpecName "local-storage07-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.526825 4961 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.527353 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-config-data" (OuterVolumeSpecName: "config-data") pod "664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25" (UID: "664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.541048 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25" (UID: "664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.548360 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d84b827-da99-45f3-a7ac-a031bc3df2eb-scripts" (OuterVolumeSpecName: "scripts") pod "9d84b827-da99-45f3-a7ac-a031bc3df2eb" (UID: "9d84b827-da99-45f3-a7ac-a031bc3df2eb"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.553822 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d84b827-da99-45f3-a7ac-a031bc3df2eb-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "9d84b827-da99-45f3-a7ac-a031bc3df2eb" (UID: "9d84b827-da99-45f3-a7ac-a031bc3df2eb"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.566559 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d84b827-da99-45f3-a7ac-a031bc3df2eb-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "9d84b827-da99-45f3-a7ac-a031bc3df2eb" (UID: "9d84b827-da99-45f3-a7ac-a031bc3df2eb"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.573684 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d84b827-da99-45f3-a7ac-a031bc3df2eb-config-data" (OuterVolumeSpecName: "config-data") pod "9d84b827-da99-45f3-a7ac-a031bc3df2eb" (UID: "9d84b827-da99-45f3-a7ac-a031bc3df2eb"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.602250 4961 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9d84b827-da99-45f3-a7ac-a031bc3df2eb-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.602293 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.602304 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9d84b827-da99-45f3-a7ac-a031bc3df2eb-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.602315 4961 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.602324 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.602336 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wvkfq\" (UniqueName: \"kubernetes.io/projected/9d84b827-da99-45f3-a7ac-a031bc3df2eb-kube-api-access-wvkfq\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.602345 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9d84b827-da99-45f3-a7ac-a031bc3df2eb-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.602383 4961 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.602392 4961 
reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9d84b827-da99-45f3-a7ac-a031bc3df2eb-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.602402 4961 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.620531 4961 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Dec 05 17:51:14 crc kubenswrapper[4961]: I1205 17:51:14.703863 4961 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:15 crc kubenswrapper[4961]: E1205 17:51:15.005741 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified" Dec 05 17:51:15 crc kubenswrapper[4961]: E1205 17:51:15.007148 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:barbican-db-sync,Image:quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified,Command:[/bin/bash],Args:[-c barbican-manage db upgrade],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/barbican/barbican.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8sfnr,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42403,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42403,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod barbican-db-sync-9q2wd_openstack(c3a753a7-88b4-4e0c-a1c6-82e79643c6b0): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.011381 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"9d84b827-da99-45f3-a7ac-a031bc3df2eb","Type":"ContainerDied","Data":"c13c97ee82f0cd2245a6e73a1833ac5c2e4aaf5929ffa33205c94e8572c065c7"} Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.011445 4961 scope.go:117] 
"RemoveContainer" containerID="88632bc8bd867c19ecb86f583f7ec9fb4cf8560e30947277a734328230f570ae" Dec 05 17:51:15 crc kubenswrapper[4961]: E1205 17:51:15.011632 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/barbican-db-sync-9q2wd" podUID="c3a753a7-88b4-4e0c-a1c6-82e79643c6b0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.011672 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.021796 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.021956 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25","Type":"ContainerDied","Data":"1bb74fbde27ef5a0920f4b7a127914ce7e0c4994ecd9b54db500db90662365cc"} Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.036140 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-rxkp9" event={"ID":"9e3558d5-7e3a-4848-8f4c-509a5f19a8c7","Type":"ContainerDied","Data":"3ebdee1b230f891fff78a66a2d981526de36ad053376b0f1ecc5042d2d40549f"} Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.036191 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-rxkp9" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.036206 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3ebdee1b230f891fff78a66a2d981526de36ad053376b0f1ecc5042d2d40549f" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.088268 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.107974 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.122136 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.133529 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.143439 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:51:15 crc kubenswrapper[4961]: E1205 17:51:15.143879 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25" containerName="glance-log" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.143898 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25" containerName="glance-log" Dec 05 17:51:15 crc kubenswrapper[4961]: E1205 17:51:15.143921 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d84b827-da99-45f3-a7ac-a031bc3df2eb" containerName="glance-log" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.143927 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d84b827-da99-45f3-a7ac-a031bc3df2eb" containerName="glance-log" Dec 05 17:51:15 crc kubenswrapper[4961]: E1205 17:51:15.143935 4961 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25" containerName="glance-httpd" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.143942 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25" containerName="glance-httpd" Dec 05 17:51:15 crc kubenswrapper[4961]: E1205 17:51:15.143959 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9e3558d5-7e3a-4848-8f4c-509a5f19a8c7" containerName="keystone-bootstrap" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.143965 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="9e3558d5-7e3a-4848-8f4c-509a5f19a8c7" containerName="keystone-bootstrap" Dec 05 17:51:15 crc kubenswrapper[4961]: E1205 17:51:15.143975 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9d84b827-da99-45f3-a7ac-a031bc3df2eb" containerName="glance-httpd" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.143980 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="9d84b827-da99-45f3-a7ac-a031bc3df2eb" containerName="glance-httpd" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.144162 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25" containerName="glance-httpd" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.144173 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25" containerName="glance-log" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.144188 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="9e3558d5-7e3a-4848-8f4c-509a5f19a8c7" containerName="keystone-bootstrap" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.144199 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d84b827-da99-45f3-a7ac-a031bc3df2eb" containerName="glance-httpd" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.144207 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="9d84b827-da99-45f3-a7ac-a031bc3df2eb" containerName="glance-log" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.145159 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.146905 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-scripts" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.147331 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.147614 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.147761 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-glance-dockercfg-f5xbg" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.153201 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.184962 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.186889 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.190168 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.190463 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.194661 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.216228 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-84rxt\" (UniqueName: \"kubernetes.io/projected/792b07a4-55ec-4870-9e27-3b6e4d250b67-kube-api-access-84rxt\") pod \"glance-default-external-api-0\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.216287 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/792b07a4-55ec-4870-9e27-3b6e4d250b67-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.216350 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/792b07a4-55ec-4870-9e27-3b6e4d250b67-logs\") pod \"glance-default-external-api-0\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.216373 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/792b07a4-55ec-4870-9e27-3b6e4d250b67-scripts\") pod \"glance-default-external-api-0\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.216401 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/792b07a4-55ec-4870-9e27-3b6e4d250b67-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.216418 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/792b07a4-55ec-4870-9e27-3b6e4d250b67-config-data\") pod \"glance-default-external-api-0\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.216455 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.216470 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started 
for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/792b07a4-55ec-4870-9e27-3b6e4d250b67-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.317712 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-84rxt\" (UniqueName: \"kubernetes.io/projected/792b07a4-55ec-4870-9e27-3b6e4d250b67-kube-api-access-84rxt\") pod \"glance-default-external-api-0\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.317756 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.317813 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/792b07a4-55ec-4870-9e27-3b6e4d250b67-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.317855 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.317873 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-logs\") pod \"glance-default-internal-api-0\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.317931 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/792b07a4-55ec-4870-9e27-3b6e4d250b67-logs\") pod \"glance-default-external-api-0\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.317953 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmx5j\" (UniqueName: \"kubernetes.io/projected/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-kube-api-access-hmx5j\") pod \"glance-default-internal-api-0\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.317975 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/792b07a4-55ec-4870-9e27-3b6e4d250b67-scripts\") pod \"glance-default-external-api-0\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.318005 4961 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.318025 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.318043 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.318065 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/792b07a4-55ec-4870-9e27-3b6e4d250b67-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.318083 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/792b07a4-55ec-4870-9e27-3b6e4d250b67-config-data\") pod \"glance-default-external-api-0\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.318112 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.318140 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.318156 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/792b07a4-55ec-4870-9e27-3b6e4d250b67-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.320707 4961 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.321177 4961 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/792b07a4-55ec-4870-9e27-3b6e4d250b67-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.323505 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/792b07a4-55ec-4870-9e27-3b6e4d250b67-logs\") pod \"glance-default-external-api-0\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.329539 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/792b07a4-55ec-4870-9e27-3b6e4d250b67-scripts\") pod \"glance-default-external-api-0\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.329691 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/792b07a4-55ec-4870-9e27-3b6e4d250b67-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.334402 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/792b07a4-55ec-4870-9e27-3b6e4d250b67-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.337395 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/792b07a4-55ec-4870-9e27-3b6e4d250b67-config-data\") pod \"glance-default-external-api-0\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.338325 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-84rxt\" (UniqueName: \"kubernetes.io/projected/792b07a4-55ec-4870-9e27-3b6e4d250b67-kube-api-access-84rxt\") pod \"glance-default-external-api-0\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.372191 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.420632 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmx5j\" (UniqueName: \"kubernetes.io/projected/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-kube-api-access-hmx5j\") pod \"glance-default-internal-api-0\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.420717 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod 
\"glance-default-internal-api-0\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.420742 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.420762 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.420811 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.420882 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.420925 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.420942 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-logs\") pod \"glance-default-internal-api-0\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.421701 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-logs\") pod \"glance-default-internal-api-0\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.422081 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.423075 4961 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") device mount path \"/mnt/openstack/pv10\"" 
pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.426482 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.428337 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-scripts\") pod \"glance-default-internal-api-0\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.432981 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-config-data\") pod \"glance-default-internal-api-0\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.439313 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.447683 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmx5j\" (UniqueName: \"kubernetes.io/projected/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-kube-api-access-hmx5j\") pod \"glance-default-internal-api-0\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.453118 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.465295 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.502841 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.538442 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-rxkp9"] Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.550537 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-rxkp9"] Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.613690 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-bootstrap-vlqz4"] Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.614765 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-vlqz4" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.619104 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.621208 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.621407 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-pp6pr" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.621684 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.622303 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"osp-secret" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.634097 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-vlqz4"] Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.729482 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-combined-ca-bundle\") pod \"keystone-bootstrap-vlqz4\" (UID: \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\") " pod="openstack/keystone-bootstrap-vlqz4" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.729576 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-fernet-keys\") pod \"keystone-bootstrap-vlqz4\" (UID: \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\") " pod="openstack/keystone-bootstrap-vlqz4" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.730004 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-28c9w\" (UniqueName: \"kubernetes.io/projected/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-kube-api-access-28c9w\") pod \"keystone-bootstrap-vlqz4\" (UID: \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\") " pod="openstack/keystone-bootstrap-vlqz4" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.730086 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-config-data\") pod \"keystone-bootstrap-vlqz4\" (UID: \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\") " pod="openstack/keystone-bootstrap-vlqz4" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.730165 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-scripts\") pod \"keystone-bootstrap-vlqz4\" (UID: \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\") " pod="openstack/keystone-bootstrap-vlqz4" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.730215 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-credential-keys\") pod \"keystone-bootstrap-vlqz4\" (UID: \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\") " pod="openstack/keystone-bootstrap-vlqz4" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.832899 4961 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-fernet-keys\") pod \"keystone-bootstrap-vlqz4\" (UID: \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\") " pod="openstack/keystone-bootstrap-vlqz4" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.833050 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-28c9w\" (UniqueName: \"kubernetes.io/projected/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-kube-api-access-28c9w\") pod \"keystone-bootstrap-vlqz4\" (UID: \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\") " pod="openstack/keystone-bootstrap-vlqz4" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.833104 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-config-data\") pod \"keystone-bootstrap-vlqz4\" (UID: \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\") " pod="openstack/keystone-bootstrap-vlqz4" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.833133 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-scripts\") pod \"keystone-bootstrap-vlqz4\" (UID: \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\") " pod="openstack/keystone-bootstrap-vlqz4" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.833167 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-credential-keys\") pod \"keystone-bootstrap-vlqz4\" (UID: \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\") " pod="openstack/keystone-bootstrap-vlqz4" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.833269 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-combined-ca-bundle\") pod \"keystone-bootstrap-vlqz4\" (UID: \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\") " pod="openstack/keystone-bootstrap-vlqz4" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.837060 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-credential-keys\") pod \"keystone-bootstrap-vlqz4\" (UID: \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\") " pod="openstack/keystone-bootstrap-vlqz4" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.837751 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-scripts\") pod \"keystone-bootstrap-vlqz4\" (UID: \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\") " pod="openstack/keystone-bootstrap-vlqz4" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.838554 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-config-data\") pod \"keystone-bootstrap-vlqz4\" (UID: \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\") " pod="openstack/keystone-bootstrap-vlqz4" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.839432 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-fernet-keys\") pod \"keystone-bootstrap-vlqz4\" (UID: \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\") " 
pod="openstack/keystone-bootstrap-vlqz4" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.839591 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-combined-ca-bundle\") pod \"keystone-bootstrap-vlqz4\" (UID: \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\") " pod="openstack/keystone-bootstrap-vlqz4" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.850546 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-28c9w\" (UniqueName: \"kubernetes.io/projected/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-kube-api-access-28c9w\") pod \"keystone-bootstrap-vlqz4\" (UID: \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\") " pod="openstack/keystone-bootstrap-vlqz4" Dec 05 17:51:15 crc kubenswrapper[4961]: I1205 17:51:15.937387 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-vlqz4" Dec 05 17:51:16 crc kubenswrapper[4961]: E1205 17:51:16.045990 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"barbican-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-barbican-api:current-podified\\\"\"" pod="openstack/barbican-db-sync-9q2wd" podUID="c3a753a7-88b4-4e0c-a1c6-82e79643c6b0" Dec 05 17:51:16 crc kubenswrapper[4961]: I1205 17:51:16.883487 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25" path="/var/lib/kubelet/pods/664e2e6a-0fcd-491f-89a0-1d7f0b8b7c25/volumes" Dec 05 17:51:16 crc kubenswrapper[4961]: I1205 17:51:16.888058 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d84b827-da99-45f3-a7ac-a031bc3df2eb" path="/var/lib/kubelet/pods/9d84b827-da99-45f3-a7ac-a031bc3df2eb/volumes" Dec 05 17:51:16 crc kubenswrapper[4961]: I1205 17:51:16.893205 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9e3558d5-7e3a-4848-8f4c-509a5f19a8c7" path="/var/lib/kubelet/pods/9e3558d5-7e3a-4848-8f4c-509a5f19a8c7/volumes" Dec 05 17:51:21 crc kubenswrapper[4961]: I1205 17:51:21.242593 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" podUID="27a3156a-6997-42ea-888b-ff4a4c2b1988" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.119:5353: i/o timeout" Dec 05 17:51:21 crc kubenswrapper[4961]: I1205 17:51:21.242994 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.295244 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7cd9f578cf-pwnpz" Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.306032 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-d5754c6c5-dtppz" Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.482310 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/61ae97fc-6378-4d23-a37b-4485f846fcbf-scripts\") pod \"61ae97fc-6378-4d23-a37b-4485f846fcbf\" (UID: \"61ae97fc-6378-4d23-a37b-4485f846fcbf\") " Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.482387 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1004621a-ebc0-4b35-9a52-07089f68e790-logs\") pod \"1004621a-ebc0-4b35-9a52-07089f68e790\" (UID: \"1004621a-ebc0-4b35-9a52-07089f68e790\") " Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.482478 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1004621a-ebc0-4b35-9a52-07089f68e790-scripts\") pod \"1004621a-ebc0-4b35-9a52-07089f68e790\" (UID: \"1004621a-ebc0-4b35-9a52-07089f68e790\") " Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.482509 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/61ae97fc-6378-4d23-a37b-4485f846fcbf-horizon-secret-key\") pod \"61ae97fc-6378-4d23-a37b-4485f846fcbf\" (UID: \"61ae97fc-6378-4d23-a37b-4485f846fcbf\") " Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.482529 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/61ae97fc-6378-4d23-a37b-4485f846fcbf-config-data\") pod \"61ae97fc-6378-4d23-a37b-4485f846fcbf\" (UID: \"61ae97fc-6378-4d23-a37b-4485f846fcbf\") " Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.482559 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1004621a-ebc0-4b35-9a52-07089f68e790-config-data\") pod \"1004621a-ebc0-4b35-9a52-07089f68e790\" (UID: \"1004621a-ebc0-4b35-9a52-07089f68e790\") " Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.482587 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1004621a-ebc0-4b35-9a52-07089f68e790-horizon-secret-key\") pod \"1004621a-ebc0-4b35-9a52-07089f68e790\" (UID: \"1004621a-ebc0-4b35-9a52-07089f68e790\") " Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.482634 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vwwp2\" (UniqueName: \"kubernetes.io/projected/61ae97fc-6378-4d23-a37b-4485f846fcbf-kube-api-access-vwwp2\") pod \"61ae97fc-6378-4d23-a37b-4485f846fcbf\" (UID: \"61ae97fc-6378-4d23-a37b-4485f846fcbf\") " Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.482671 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/61ae97fc-6378-4d23-a37b-4485f846fcbf-logs\") pod \"61ae97fc-6378-4d23-a37b-4485f846fcbf\" (UID: \"61ae97fc-6378-4d23-a37b-4485f846fcbf\") " Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.482747 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf249\" (UniqueName: \"kubernetes.io/projected/1004621a-ebc0-4b35-9a52-07089f68e790-kube-api-access-bf249\") pod \"1004621a-ebc0-4b35-9a52-07089f68e790\" (UID: 
\"1004621a-ebc0-4b35-9a52-07089f68e790\") " Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.482849 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1004621a-ebc0-4b35-9a52-07089f68e790-logs" (OuterVolumeSpecName: "logs") pod "1004621a-ebc0-4b35-9a52-07089f68e790" (UID: "1004621a-ebc0-4b35-9a52-07089f68e790"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.483064 4961 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1004621a-ebc0-4b35-9a52-07089f68e790-logs\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.483117 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1004621a-ebc0-4b35-9a52-07089f68e790-scripts" (OuterVolumeSpecName: "scripts") pod "1004621a-ebc0-4b35-9a52-07089f68e790" (UID: "1004621a-ebc0-4b35-9a52-07089f68e790"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.483177 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1004621a-ebc0-4b35-9a52-07089f68e790-config-data" (OuterVolumeSpecName: "config-data") pod "1004621a-ebc0-4b35-9a52-07089f68e790" (UID: "1004621a-ebc0-4b35-9a52-07089f68e790"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.483577 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61ae97fc-6378-4d23-a37b-4485f846fcbf-logs" (OuterVolumeSpecName: "logs") pod "61ae97fc-6378-4d23-a37b-4485f846fcbf" (UID: "61ae97fc-6378-4d23-a37b-4485f846fcbf"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.483638 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61ae97fc-6378-4d23-a37b-4485f846fcbf-config-data" (OuterVolumeSpecName: "config-data") pod "61ae97fc-6378-4d23-a37b-4485f846fcbf" (UID: "61ae97fc-6378-4d23-a37b-4485f846fcbf"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.483884 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61ae97fc-6378-4d23-a37b-4485f846fcbf-scripts" (OuterVolumeSpecName: "scripts") pod "61ae97fc-6378-4d23-a37b-4485f846fcbf" (UID: "61ae97fc-6378-4d23-a37b-4485f846fcbf"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.488352 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1004621a-ebc0-4b35-9a52-07089f68e790-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "1004621a-ebc0-4b35-9a52-07089f68e790" (UID: "1004621a-ebc0-4b35-9a52-07089f68e790"). InnerVolumeSpecName "horizon-secret-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.488856 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61ae97fc-6378-4d23-a37b-4485f846fcbf-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "61ae97fc-6378-4d23-a37b-4485f846fcbf" (UID: "61ae97fc-6378-4d23-a37b-4485f846fcbf"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.488952 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61ae97fc-6378-4d23-a37b-4485f846fcbf-kube-api-access-vwwp2" (OuterVolumeSpecName: "kube-api-access-vwwp2") pod "61ae97fc-6378-4d23-a37b-4485f846fcbf" (UID: "61ae97fc-6378-4d23-a37b-4485f846fcbf"). InnerVolumeSpecName "kube-api-access-vwwp2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.489447 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1004621a-ebc0-4b35-9a52-07089f68e790-kube-api-access-bf249" (OuterVolumeSpecName: "kube-api-access-bf249") pod "1004621a-ebc0-4b35-9a52-07089f68e790" (UID: "1004621a-ebc0-4b35-9a52-07089f68e790"). InnerVolumeSpecName "kube-api-access-bf249". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.584883 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf249\" (UniqueName: \"kubernetes.io/projected/1004621a-ebc0-4b35-9a52-07089f68e790-kube-api-access-bf249\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.584930 4961 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/61ae97fc-6378-4d23-a37b-4485f846fcbf-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.584941 4961 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/1004621a-ebc0-4b35-9a52-07089f68e790-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.584952 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/61ae97fc-6378-4d23-a37b-4485f846fcbf-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.584964 4961 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/61ae97fc-6378-4d23-a37b-4485f846fcbf-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.584975 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/1004621a-ebc0-4b35-9a52-07089f68e790-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.584985 4961 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/1004621a-ebc0-4b35-9a52-07089f68e790-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.584994 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vwwp2\" (UniqueName: \"kubernetes.io/projected/61ae97fc-6378-4d23-a37b-4485f846fcbf-kube-api-access-vwwp2\") on node \"crc\" 
DevicePath \"\"" Dec 05 17:51:23 crc kubenswrapper[4961]: I1205 17:51:23.585002 4961 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/61ae97fc-6378-4d23-a37b-4485f846fcbf-logs\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.103367 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-d5754c6c5-dtppz" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.103369 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-d5754c6c5-dtppz" event={"ID":"1004621a-ebc0-4b35-9a52-07089f68e790","Type":"ContainerDied","Data":"1b35e18a3081f505597f807e34a357ce278d88a2406b9a53b2fe2ca0b4abbbd1"} Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.105249 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-7cd9f578cf-pwnpz" event={"ID":"61ae97fc-6378-4d23-a37b-4485f846fcbf","Type":"ContainerDied","Data":"dd4585e36349d72617e9bf98c3c5dc2d3b359ddd8e5534c5866001da24571842"} Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.105322 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-7cd9f578cf-pwnpz" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.191322 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-d5754c6c5-dtppz"] Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.201766 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-d5754c6c5-dtppz"] Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.217386 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-7cd9f578cf-pwnpz"] Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.225291 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-7cd9f578cf-pwnpz"] Dec 05 17:51:24 crc kubenswrapper[4961]: E1205 17:51:24.386830 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified" Dec 05 17:51:24 crc kubenswrapper[4961]: E1205 17:51:24.387007 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceilometer-central-agent,Image:quay.io/podified-antelope-centos9/openstack-ceilometer-central:current-podified,Command:[/bin/bash],Args:[-c 
/usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:CONFIG_HASH,Value:nfdh589h5c8h9fh59fh66ch657h648hc4h65ch85hd4h687hb6h577h57fh5c4h56hch697h64bh555h555hf8h66dh88h54h586h575h566h6fh666q,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:scripts,ReadOnly:true,MountPath:/var/lib/openstack/bin,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/openstack/config,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:ceilometer-central-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ltz74,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:&Probe{ProbeHandler:ProbeHandler{Exec:&ExecAction{Command:[/usr/bin/python3 /var/lib/openstack/bin/centralhealth.py],},HTTPGet:nil,TCPSocket:nil,GRPC:nil,},InitialDelaySeconds:300,TimeoutSeconds:5,PeriodSeconds:5,SuccessThreshold:1,FailureThreshold:3,TerminationGracePeriodSeconds:nil,},ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:nil,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceilometer-0_openstack(c30d6edf-8519-48cf-bcb0-f35c08e19a8b): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.513972 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.517823 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-dd8dcd4b5-hjh8n" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.704415 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27a3156a-6997-42ea-888b-ff4a4c2b1988-config\") pod \"27a3156a-6997-42ea-888b-ff4a4c2b1988\" (UID: \"27a3156a-6997-42ea-888b-ff4a4c2b1988\") " Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.704785 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/27a3156a-6997-42ea-888b-ff4a4c2b1988-ovsdbserver-nb\") pod \"27a3156a-6997-42ea-888b-ff4a4c2b1988\" (UID: \"27a3156a-6997-42ea-888b-ff4a4c2b1988\") " Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.704951 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hw5hq\" (UniqueName: \"kubernetes.io/projected/27a3156a-6997-42ea-888b-ff4a4c2b1988-kube-api-access-hw5hq\") pod \"27a3156a-6997-42ea-888b-ff4a4c2b1988\" (UID: \"27a3156a-6997-42ea-888b-ff4a4c2b1988\") " Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.705006 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/27a3156a-6997-42ea-888b-ff4a4c2b1988-dns-svc\") pod \"27a3156a-6997-42ea-888b-ff4a4c2b1988\" (UID: \"27a3156a-6997-42ea-888b-ff4a4c2b1988\") " Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.705061 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-logs\") pod \"e769efa9-1494-4b54-ac9f-97ea6bd0c55d\" (UID: \"e769efa9-1494-4b54-ac9f-97ea6bd0c55d\") " Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.705089 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-scripts\") pod \"e769efa9-1494-4b54-ac9f-97ea6bd0c55d\" (UID: \"e769efa9-1494-4b54-ac9f-97ea6bd0c55d\") " Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.705152 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pzz4j\" (UniqueName: \"kubernetes.io/projected/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-kube-api-access-pzz4j\") pod \"e769efa9-1494-4b54-ac9f-97ea6bd0c55d\" (UID: \"e769efa9-1494-4b54-ac9f-97ea6bd0c55d\") " Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.705181 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-horizon-secret-key\") pod \"e769efa9-1494-4b54-ac9f-97ea6bd0c55d\" (UID: \"e769efa9-1494-4b54-ac9f-97ea6bd0c55d\") " Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.705219 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-config-data\") pod \"e769efa9-1494-4b54-ac9f-97ea6bd0c55d\" (UID: \"e769efa9-1494-4b54-ac9f-97ea6bd0c55d\") " Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.705261 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/27a3156a-6997-42ea-888b-ff4a4c2b1988-ovsdbserver-sb\") pod \"27a3156a-6997-42ea-888b-ff4a4c2b1988\" (UID: 
\"27a3156a-6997-42ea-888b-ff4a4c2b1988\") " Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.705983 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-logs" (OuterVolumeSpecName: "logs") pod "e769efa9-1494-4b54-ac9f-97ea6bd0c55d" (UID: "e769efa9-1494-4b54-ac9f-97ea6bd0c55d"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.706133 4961 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-logs\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.706644 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-config-data" (OuterVolumeSpecName: "config-data") pod "e769efa9-1494-4b54-ac9f-97ea6bd0c55d" (UID: "e769efa9-1494-4b54-ac9f-97ea6bd0c55d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.707839 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-scripts" (OuterVolumeSpecName: "scripts") pod "e769efa9-1494-4b54-ac9f-97ea6bd0c55d" (UID: "e769efa9-1494-4b54-ac9f-97ea6bd0c55d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.710254 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-kube-api-access-pzz4j" (OuterVolumeSpecName: "kube-api-access-pzz4j") pod "e769efa9-1494-4b54-ac9f-97ea6bd0c55d" (UID: "e769efa9-1494-4b54-ac9f-97ea6bd0c55d"). InnerVolumeSpecName "kube-api-access-pzz4j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.710837 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "e769efa9-1494-4b54-ac9f-97ea6bd0c55d" (UID: "e769efa9-1494-4b54-ac9f-97ea6bd0c55d"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.725380 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27a3156a-6997-42ea-888b-ff4a4c2b1988-kube-api-access-hw5hq" (OuterVolumeSpecName: "kube-api-access-hw5hq") pod "27a3156a-6997-42ea-888b-ff4a4c2b1988" (UID: "27a3156a-6997-42ea-888b-ff4a4c2b1988"). InnerVolumeSpecName "kube-api-access-hw5hq". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.769988 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27a3156a-6997-42ea-888b-ff4a4c2b1988-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "27a3156a-6997-42ea-888b-ff4a4c2b1988" (UID: "27a3156a-6997-42ea-888b-ff4a4c2b1988"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.771220 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27a3156a-6997-42ea-888b-ff4a4c2b1988-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "27a3156a-6997-42ea-888b-ff4a4c2b1988" (UID: "27a3156a-6997-42ea-888b-ff4a4c2b1988"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.775436 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27a3156a-6997-42ea-888b-ff4a4c2b1988-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "27a3156a-6997-42ea-888b-ff4a4c2b1988" (UID: "27a3156a-6997-42ea-888b-ff4a4c2b1988"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.786459 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27a3156a-6997-42ea-888b-ff4a4c2b1988-config" (OuterVolumeSpecName: "config") pod "27a3156a-6997-42ea-888b-ff4a4c2b1988" (UID: "27a3156a-6997-42ea-888b-ff4a4c2b1988"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.808904 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/27a3156a-6997-42ea-888b-ff4a4c2b1988-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.808932 4961 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/27a3156a-6997-42ea-888b-ff4a4c2b1988-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.808948 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hw5hq\" (UniqueName: \"kubernetes.io/projected/27a3156a-6997-42ea-888b-ff4a4c2b1988-kube-api-access-hw5hq\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.808958 4961 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/27a3156a-6997-42ea-888b-ff4a4c2b1988-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.808968 4961 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.808978 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pzz4j\" (UniqueName: \"kubernetes.io/projected/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-kube-api-access-pzz4j\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.808989 4961 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.809000 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/e769efa9-1494-4b54-ac9f-97ea6bd0c55d-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.809011 4961 
reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/27a3156a-6997-42ea-888b-ff4a4c2b1988-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.898232 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1004621a-ebc0-4b35-9a52-07089f68e790" path="/var/lib/kubelet/pods/1004621a-ebc0-4b35-9a52-07089f68e790/volumes" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.898903 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="61ae97fc-6378-4d23-a37b-4485f846fcbf" path="/var/lib/kubelet/pods/61ae97fc-6378-4d23-a37b-4485f846fcbf/volumes" Dec 05 17:51:24 crc kubenswrapper[4961]: I1205 17:51:24.900958 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-storage-0"] Dec 05 17:51:25 crc kubenswrapper[4961]: I1205 17:51:25.117622 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-dd8dcd4b5-hjh8n" Dec 05 17:51:25 crc kubenswrapper[4961]: I1205 17:51:25.117630 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-dd8dcd4b5-hjh8n" event={"ID":"e769efa9-1494-4b54-ac9f-97ea6bd0c55d","Type":"ContainerDied","Data":"d4b6a8e0f9972770f736007324fbbd85e532223ee165d01637c39903e6621ee3"} Dec 05 17:51:25 crc kubenswrapper[4961]: I1205 17:51:25.119634 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" event={"ID":"27a3156a-6997-42ea-888b-ff4a4c2b1988","Type":"ContainerDied","Data":"1e94cd75ef87e30eecf906cc11d1b1c7725a1cd0d104f11f277a4e2d1bcd8f4b"} Dec 05 17:51:25 crc kubenswrapper[4961]: I1205 17:51:25.119717 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" Dec 05 17:51:25 crc kubenswrapper[4961]: I1205 17:51:25.122384 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-sd82p" event={"ID":"b58877bc-157d-4919-9418-e5b306dff028","Type":"ContainerDied","Data":"b0f14d72e67dbe1c466aa0b1f46aac1c0805eac023e62263017206534e3d1bbe"} Dec 05 17:51:25 crc kubenswrapper[4961]: I1205 17:51:25.122592 4961 generic.go:334] "Generic (PLEG): container finished" podID="b58877bc-157d-4919-9418-e5b306dff028" containerID="b0f14d72e67dbe1c466aa0b1f46aac1c0805eac023e62263017206534e3d1bbe" exitCode=0 Dec 05 17:51:25 crc kubenswrapper[4961]: I1205 17:51:25.157917 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-t65n2"] Dec 05 17:51:25 crc kubenswrapper[4961]: I1205 17:51:25.164311 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b8fbc5445-t65n2"] Dec 05 17:51:25 crc kubenswrapper[4961]: I1205 17:51:25.199303 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-dd8dcd4b5-hjh8n"] Dec 05 17:51:25 crc kubenswrapper[4961]: I1205 17:51:25.207321 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-dd8dcd4b5-hjh8n"] Dec 05 17:51:25 crc kubenswrapper[4961]: E1205 17:51:25.640953 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified" Dec 05 17:51:25 crc kubenswrapper[4961]: E1205 17:51:25.641165 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:cinder-db-sync,Image:quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified,Command:[/bin/bash],Args:[-c /usr/local/bin/kolla_set_configs && /usr/local/bin/kolla_start],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:TRUE,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:etc-machine-id,ReadOnly:true,MountPath:/etc/machine-id,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:scripts,ReadOnly:true,MountPath:/usr/local/bin/container-scripts,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/config-data/merged,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:db-sync-config-data,ReadOnly:true,MountPath:/etc/cinder/cinder.conf.d,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/var/lib/kolla/config_files/config.json,SubPath:db-sync-config.json,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:combined-ca-bundle,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rjqbl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:nil,Privileged:nil,SELinuxOptions:nil,RunAsUser:*0,RunAsNonRoot:nil,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod cinder-db-sync-pvww2_openstack(62cd878c-721b-46b4-87bb-1573a9fcf6d9): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 17:51:25 crc kubenswrapper[4961]: E1205 17:51:25.642346 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/cinder-db-sync-pvww2" podUID="62cd878c-721b-46b4-87bb-1573a9fcf6d9" Dec 05 17:51:25 crc kubenswrapper[4961]: I1205 17:51:25.652392 4961 scope.go:117] "RemoveContainer" containerID="7a6370ffbe96690414a765ad639522c2b2ea3c91b0fad125defc88fcbcd5fdc0" Dec 05 17:51:25 crc kubenswrapper[4961]: I1205 17:51:25.851951 4961 scope.go:117] "RemoveContainer" containerID="128117abee0a4f964bdbdd1ac159790ffab4f607c44c277d18e09c8166f9e915" Dec 05 17:51:25 crc kubenswrapper[4961]: I1205 17:51:25.884108 4961 scope.go:117] "RemoveContainer" containerID="6ebf534a403746544693b78bc0c1aef119bc38720cc036711d8a59615aa73bb2" Dec 05 17:51:25 crc kubenswrapper[4961]: I1205 17:51:25.906874 4961 scope.go:117] "RemoveContainer" 
containerID="761b369dfd98e3c652aab0588518ce5d190a38dcdcc629b8ef251ed12fbb3b31" Dec 05 17:51:25 crc kubenswrapper[4961]: I1205 17:51:25.932441 4961 scope.go:117] "RemoveContainer" containerID="179c9efc21fecd93f996c3f35a2a1e198b49545d55959ab1aff0193c90987e87" Dec 05 17:51:26 crc kubenswrapper[4961]: I1205 17:51:26.136048 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-k7c2f" event={"ID":"125fc58a-f251-4b81-98e7-eca6a2c72a8e","Type":"ContainerStarted","Data":"4050ed7379c04d5387701f9805b61b386236cc4ac4ae79db61a625cadb152ff2"} Dec 05 17:51:26 crc kubenswrapper[4961]: I1205 17:51:26.137193 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e533098a-ca28-487e-8471-7a426defda37","Type":"ContainerStarted","Data":"38fa4ee542bd2a6140e8965cf0405b7d82202100fa53546a0b529ba63b8e76f8"} Dec 05 17:51:26 crc kubenswrapper[4961]: I1205 17:51:26.137699 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-78bb69647d-95ptt"] Dec 05 17:51:26 crc kubenswrapper[4961]: E1205 17:51:26.154852 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"cinder-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-cinder-api:current-podified\\\"\"" pod="openstack/cinder-db-sync-pvww2" podUID="62cd878c-721b-46b4-87bb-1573a9fcf6d9" Dec 05 17:51:26 crc kubenswrapper[4961]: I1205 17:51:26.166085 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-db-sync-k7c2f" podStartSLOduration=3.5620799119999997 podStartE2EDuration="31.166066361s" podCreationTimestamp="2025-12-05 17:50:55 +0000 UTC" firstStartedPulling="2025-12-05 17:50:56.769147702 +0000 UTC m=+1062.830298175" lastFinishedPulling="2025-12-05 17:51:24.373134161 +0000 UTC m=+1090.434284624" observedRunningTime="2025-12-05 17:51:26.152013256 +0000 UTC m=+1092.213163739" watchObservedRunningTime="2025-12-05 17:51:26.166066361 +0000 UTC m=+1092.227216834" Dec 05 17:51:26 crc kubenswrapper[4961]: I1205 17:51:26.221550 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-bootstrap-vlqz4"] Dec 05 17:51:26 crc kubenswrapper[4961]: I1205 17:51:26.230684 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/horizon-65bb59d746-cqlw9"] Dec 05 17:51:26 crc kubenswrapper[4961]: I1205 17:51:26.243127 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/dnsmasq-dns-b8fbc5445-t65n2" podUID="27a3156a-6997-42ea-888b-ff4a4c2b1988" containerName="dnsmasq-dns" probeResult="failure" output="dial tcp 10.217.0.119:5353: i/o timeout" Dec 05 17:51:26 crc kubenswrapper[4961]: I1205 17:51:26.308481 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:51:26 crc kubenswrapper[4961]: I1205 17:51:26.435032 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:51:26 crc kubenswrapper[4961]: W1205 17:51:26.511910 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod01eef206_68f4_4923_8253_2a130ba0dca3.slice/crio-774812ddff6ecc48ef5bfe499a4b3c36832c79aa054864d429285d7f1156f23d WatchSource:0}: Error finding container 774812ddff6ecc48ef5bfe499a4b3c36832c79aa054864d429285d7f1156f23d: Status 404 returned error can't find the container with id 774812ddff6ecc48ef5bfe499a4b3c36832c79aa054864d429285d7f1156f23d Dec 
Dec 05 17:51:26 crc kubenswrapper[4961]: I1205 17:51:26.524955 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-sd82p"
Dec 05 17:51:26 crc kubenswrapper[4961]: I1205 17:51:26.539502 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b58877bc-157d-4919-9418-e5b306dff028-combined-ca-bundle\") pod \"b58877bc-157d-4919-9418-e5b306dff028\" (UID: \"b58877bc-157d-4919-9418-e5b306dff028\") "
Dec 05 17:51:26 crc kubenswrapper[4961]: I1205 17:51:26.539695 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/b58877bc-157d-4919-9418-e5b306dff028-config\") pod \"b58877bc-157d-4919-9418-e5b306dff028\" (UID: \"b58877bc-157d-4919-9418-e5b306dff028\") "
Dec 05 17:51:26 crc kubenswrapper[4961]: I1205 17:51:26.539835 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jvz5q\" (UniqueName: \"kubernetes.io/projected/b58877bc-157d-4919-9418-e5b306dff028-kube-api-access-jvz5q\") pod \"b58877bc-157d-4919-9418-e5b306dff028\" (UID: \"b58877bc-157d-4919-9418-e5b306dff028\") "
Dec 05 17:51:26 crc kubenswrapper[4961]: I1205 17:51:26.548254 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b58877bc-157d-4919-9418-e5b306dff028-kube-api-access-jvz5q" (OuterVolumeSpecName: "kube-api-access-jvz5q") pod "b58877bc-157d-4919-9418-e5b306dff028" (UID: "b58877bc-157d-4919-9418-e5b306dff028"). InnerVolumeSpecName "kube-api-access-jvz5q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:51:26 crc kubenswrapper[4961]: I1205 17:51:26.564629 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b58877bc-157d-4919-9418-e5b306dff028-config" (OuterVolumeSpecName: "config") pod "b58877bc-157d-4919-9418-e5b306dff028" (UID: "b58877bc-157d-4919-9418-e5b306dff028"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:51:26 crc kubenswrapper[4961]: I1205 17:51:26.567284 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b58877bc-157d-4919-9418-e5b306dff028-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b58877bc-157d-4919-9418-e5b306dff028" (UID: "b58877bc-157d-4919-9418-e5b306dff028"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:26 crc kubenswrapper[4961]: I1205 17:51:26.641804 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jvz5q\" (UniqueName: \"kubernetes.io/projected/b58877bc-157d-4919-9418-e5b306dff028-kube-api-access-jvz5q\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:26 crc kubenswrapper[4961]: I1205 17:51:26.641844 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b58877bc-157d-4919-9418-e5b306dff028-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:26 crc kubenswrapper[4961]: I1205 17:51:26.641856 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/b58877bc-157d-4919-9418-e5b306dff028-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:26 crc kubenswrapper[4961]: I1205 17:51:26.873007 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27a3156a-6997-42ea-888b-ff4a4c2b1988" path="/var/lib/kubelet/pods/27a3156a-6997-42ea-888b-ff4a4c2b1988/volumes" Dec 05 17:51:26 crc kubenswrapper[4961]: I1205 17:51:26.873886 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e769efa9-1494-4b54-ac9f-97ea6bd0c55d" path="/var/lib/kubelet/pods/e769efa9-1494-4b54-ac9f-97ea6bd0c55d/volumes" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.185231 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8","Type":"ContainerStarted","Data":"039498f303d51baece9b54bac81344956ca84e7a1bd1fd79a37cc5728093db00"} Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.194393 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-78bb69647d-95ptt" event={"ID":"a3dcddde-25f9-446a-8d5f-d9468cfa6940","Type":"ContainerStarted","Data":"fb29008208746247b9c88b86d8f98b81eeb15e421157f8d75c72dcd9c5fc13ec"} Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.195923 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-65bb59d746-cqlw9" event={"ID":"01eef206-68f4-4923-8253-2a130ba0dca3","Type":"ContainerStarted","Data":"774812ddff6ecc48ef5bfe499a4b3c36832c79aa054864d429285d7f1156f23d"} Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.197934 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"792b07a4-55ec-4870-9e27-3b6e4d250b67","Type":"ContainerStarted","Data":"3497a998dc55834b0715a3369f9839f20b67a48110a058634ad6454f26c13b8d"} Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.203700 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c30d6edf-8519-48cf-bcb0-f35c08e19a8b","Type":"ContainerStarted","Data":"91ba97da2e86065ff4b790a83ee68ce363090d49ee7585ccbe7f1ba79126fdad"} Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.212593 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vlqz4" event={"ID":"2cdd7a2f-7aea-4443-877f-a10bfb0a8512","Type":"ContainerStarted","Data":"22ef03d3a529b02d14b357ac4db14950e604db2b339187b53fdc5824efc75dab"} Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.212634 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vlqz4" event={"ID":"2cdd7a2f-7aea-4443-877f-a10bfb0a8512","Type":"ContainerStarted","Data":"f7810d505f7c704a29c79b2888e38196dadd4304ea1048cf34b87e083116258f"} Dec 05 17:51:27 crc 
kubenswrapper[4961]: I1205 17:51:27.215355 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-db-sync-sd82p" event={"ID":"b58877bc-157d-4919-9418-e5b306dff028","Type":"ContainerDied","Data":"97ecb6ac6554d8f7c40eac4bcb6d7ea6ea9d3dbe548277eeb056e24fc68245d5"}
Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.215521 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="97ecb6ac6554d8f7c40eac4bcb6d7ea6ea9d3dbe548277eeb056e24fc68245d5"
Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.215649 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-db-sync-sd82p"
Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.224257 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e533098a-ca28-487e-8471-7a426defda37","Type":"ContainerStarted","Data":"188cd5cf5c224929129cd402f9176805e78d780ae9b10db1d84e2f2b311f1002"}
Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.233297 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-bootstrap-vlqz4" podStartSLOduration=12.233282485 podStartE2EDuration="12.233282485s" podCreationTimestamp="2025-12-05 17:51:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:51:27.230885767 +0000 UTC m=+1093.292036240" watchObservedRunningTime="2025-12-05 17:51:27.233282485 +0000 UTC m=+1093.294432958"
Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.422180 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-b6c948c7-stf4x"]
Dec 05 17:51:27 crc kubenswrapper[4961]: E1205 17:51:27.422894 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27a3156a-6997-42ea-888b-ff4a4c2b1988" containerName="dnsmasq-dns"
Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.422917 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="27a3156a-6997-42ea-888b-ff4a4c2b1988" containerName="dnsmasq-dns"
Dec 05 17:51:27 crc kubenswrapper[4961]: E1205 17:51:27.422937 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27a3156a-6997-42ea-888b-ff4a4c2b1988" containerName="init"
Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.422945 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="27a3156a-6997-42ea-888b-ff4a4c2b1988" containerName="init"
Dec 05 17:51:27 crc kubenswrapper[4961]: E1205 17:51:27.422962 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b58877bc-157d-4919-9418-e5b306dff028" containerName="neutron-db-sync"
Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.422970 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="b58877bc-157d-4919-9418-e5b306dff028" containerName="neutron-db-sync"
Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.423825 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="27a3156a-6997-42ea-888b-ff4a4c2b1988" containerName="dnsmasq-dns"
Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.423853 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="b58877bc-157d-4919-9418-e5b306dff028" containerName="neutron-db-sync"
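Note: the "SyncLoop ADD/UPDATE/DELETE" source="api" lines are kubelet's view of the pod watch stream, and the cpu_manager/memory_manager RemoveStaleState pairs above show per-container resource-manager state for the deleted dnsmasq and neutron-db-sync pods being dropped before the replacements are admitted. For triage it can help to watch the same stream from the API side; a minimal client-go sketch (a hypothetical standalone tool, not part of kubelet; assumes a kubeconfig at the default path):

    package main

    import (
        "context"
        "fmt"

        corev1 "k8s.io/api/core/v1"
        metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
        "k8s.io/client-go/kubernetes"
        "k8s.io/client-go/tools/clientcmd"
    )

    func main() {
        cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
        if err != nil {
            panic(err)
        }
        cs, err := kubernetes.NewForConfig(cfg)
        if err != nil {
            panic(err)
        }
        // Same namespace as the pods in this log.
        w, err := cs.CoreV1().Pods("openstack").Watch(context.Background(), metav1.ListOptions{})
        if err != nil {
            panic(err)
        }
        for ev := range w.ResultChan() {
            pod, ok := ev.Object.(*corev1.Pod)
            if !ok {
                continue
            }
            // ADDED/MODIFIED/DELETED here correspond to the SyncLoop ADD/UPDATE/DELETE lines.
            fmt.Println(ev.Type, pod.Namespace+"/"+pod.Name, pod.Status.Phase)
        }
    }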
Need to start a new one" pod="openstack/dnsmasq-dns-b6c948c7-stf4x" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.463200 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b6c948c7-stf4x"] Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.541016 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-76c589f96d-p4cfs"] Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.545286 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-76c589f96d-p4cfs" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.547384 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-neutron-dockercfg-99cns" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.548309 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-config" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.548606 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-ovndbs" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.557551 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-76c589f96d-p4cfs"] Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.557750 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-httpd-config" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.573418 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kgjsc\" (UniqueName: \"kubernetes.io/projected/779e9f2f-2533-432a-9ee4-0adb27af5405-kube-api-access-kgjsc\") pod \"dnsmasq-dns-b6c948c7-stf4x\" (UID: \"779e9f2f-2533-432a-9ee4-0adb27af5405\") " pod="openstack/dnsmasq-dns-b6c948c7-stf4x" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.573492 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d6s7c\" (UniqueName: \"kubernetes.io/projected/5804df6a-9783-4952-84e2-deb85ddc3133-kube-api-access-d6s7c\") pod \"neutron-76c589f96d-p4cfs\" (UID: \"5804df6a-9783-4952-84e2-deb85ddc3133\") " pod="openstack/neutron-76c589f96d-p4cfs" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.573519 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5804df6a-9783-4952-84e2-deb85ddc3133-config\") pod \"neutron-76c589f96d-p4cfs\" (UID: \"5804df6a-9783-4952-84e2-deb85ddc3133\") " pod="openstack/neutron-76c589f96d-p4cfs" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.573870 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/779e9f2f-2533-432a-9ee4-0adb27af5405-ovsdbserver-sb\") pod \"dnsmasq-dns-b6c948c7-stf4x\" (UID: \"779e9f2f-2533-432a-9ee4-0adb27af5405\") " pod="openstack/dnsmasq-dns-b6c948c7-stf4x" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.575627 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/779e9f2f-2533-432a-9ee4-0adb27af5405-dns-svc\") pod \"dnsmasq-dns-b6c948c7-stf4x\" (UID: \"779e9f2f-2533-432a-9ee4-0adb27af5405\") " pod="openstack/dnsmasq-dns-b6c948c7-stf4x" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.575666 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for 
volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5804df6a-9783-4952-84e2-deb85ddc3133-httpd-config\") pod \"neutron-76c589f96d-p4cfs\" (UID: \"5804df6a-9783-4952-84e2-deb85ddc3133\") " pod="openstack/neutron-76c589f96d-p4cfs" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.575958 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5804df6a-9783-4952-84e2-deb85ddc3133-combined-ca-bundle\") pod \"neutron-76c589f96d-p4cfs\" (UID: \"5804df6a-9783-4952-84e2-deb85ddc3133\") " pod="openstack/neutron-76c589f96d-p4cfs" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.576392 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/779e9f2f-2533-432a-9ee4-0adb27af5405-config\") pod \"dnsmasq-dns-b6c948c7-stf4x\" (UID: \"779e9f2f-2533-432a-9ee4-0adb27af5405\") " pod="openstack/dnsmasq-dns-b6c948c7-stf4x" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.578001 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/779e9f2f-2533-432a-9ee4-0adb27af5405-ovsdbserver-nb\") pod \"dnsmasq-dns-b6c948c7-stf4x\" (UID: \"779e9f2f-2533-432a-9ee4-0adb27af5405\") " pod="openstack/dnsmasq-dns-b6c948c7-stf4x" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.578089 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5804df6a-9783-4952-84e2-deb85ddc3133-ovndb-tls-certs\") pod \"neutron-76c589f96d-p4cfs\" (UID: \"5804df6a-9783-4952-84e2-deb85ddc3133\") " pod="openstack/neutron-76c589f96d-p4cfs" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.685593 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/779e9f2f-2533-432a-9ee4-0adb27af5405-ovsdbserver-nb\") pod \"dnsmasq-dns-b6c948c7-stf4x\" (UID: \"779e9f2f-2533-432a-9ee4-0adb27af5405\") " pod="openstack/dnsmasq-dns-b6c948c7-stf4x" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.685648 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5804df6a-9783-4952-84e2-deb85ddc3133-ovndb-tls-certs\") pod \"neutron-76c589f96d-p4cfs\" (UID: \"5804df6a-9783-4952-84e2-deb85ddc3133\") " pod="openstack/neutron-76c589f96d-p4cfs" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.685715 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kgjsc\" (UniqueName: \"kubernetes.io/projected/779e9f2f-2533-432a-9ee4-0adb27af5405-kube-api-access-kgjsc\") pod \"dnsmasq-dns-b6c948c7-stf4x\" (UID: \"779e9f2f-2533-432a-9ee4-0adb27af5405\") " pod="openstack/dnsmasq-dns-b6c948c7-stf4x" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.685748 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d6s7c\" (UniqueName: \"kubernetes.io/projected/5804df6a-9783-4952-84e2-deb85ddc3133-kube-api-access-d6s7c\") pod \"neutron-76c589f96d-p4cfs\" (UID: \"5804df6a-9783-4952-84e2-deb85ddc3133\") " pod="openstack/neutron-76c589f96d-p4cfs" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.685766 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" 
(UniqueName: \"kubernetes.io/secret/5804df6a-9783-4952-84e2-deb85ddc3133-config\") pod \"neutron-76c589f96d-p4cfs\" (UID: \"5804df6a-9783-4952-84e2-deb85ddc3133\") " pod="openstack/neutron-76c589f96d-p4cfs" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.685817 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/779e9f2f-2533-432a-9ee4-0adb27af5405-ovsdbserver-sb\") pod \"dnsmasq-dns-b6c948c7-stf4x\" (UID: \"779e9f2f-2533-432a-9ee4-0adb27af5405\") " pod="openstack/dnsmasq-dns-b6c948c7-stf4x" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.685850 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/779e9f2f-2533-432a-9ee4-0adb27af5405-dns-svc\") pod \"dnsmasq-dns-b6c948c7-stf4x\" (UID: \"779e9f2f-2533-432a-9ee4-0adb27af5405\") " pod="openstack/dnsmasq-dns-b6c948c7-stf4x" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.685870 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5804df6a-9783-4952-84e2-deb85ddc3133-httpd-config\") pod \"neutron-76c589f96d-p4cfs\" (UID: \"5804df6a-9783-4952-84e2-deb85ddc3133\") " pod="openstack/neutron-76c589f96d-p4cfs" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.685894 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5804df6a-9783-4952-84e2-deb85ddc3133-combined-ca-bundle\") pod \"neutron-76c589f96d-p4cfs\" (UID: \"5804df6a-9783-4952-84e2-deb85ddc3133\") " pod="openstack/neutron-76c589f96d-p4cfs" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.685927 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/779e9f2f-2533-432a-9ee4-0adb27af5405-config\") pod \"dnsmasq-dns-b6c948c7-stf4x\" (UID: \"779e9f2f-2533-432a-9ee4-0adb27af5405\") " pod="openstack/dnsmasq-dns-b6c948c7-stf4x" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.686613 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/779e9f2f-2533-432a-9ee4-0adb27af5405-config\") pod \"dnsmasq-dns-b6c948c7-stf4x\" (UID: \"779e9f2f-2533-432a-9ee4-0adb27af5405\") " pod="openstack/dnsmasq-dns-b6c948c7-stf4x" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.687695 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/779e9f2f-2533-432a-9ee4-0adb27af5405-ovsdbserver-sb\") pod \"dnsmasq-dns-b6c948c7-stf4x\" (UID: \"779e9f2f-2533-432a-9ee4-0adb27af5405\") " pod="openstack/dnsmasq-dns-b6c948c7-stf4x" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.694062 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/779e9f2f-2533-432a-9ee4-0adb27af5405-ovsdbserver-nb\") pod \"dnsmasq-dns-b6c948c7-stf4x\" (UID: \"779e9f2f-2533-432a-9ee4-0adb27af5405\") " pod="openstack/dnsmasq-dns-b6c948c7-stf4x" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.700167 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5804df6a-9783-4952-84e2-deb85ddc3133-httpd-config\") pod \"neutron-76c589f96d-p4cfs\" (UID: \"5804df6a-9783-4952-84e2-deb85ddc3133\") " 
pod="openstack/neutron-76c589f96d-p4cfs" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.700419 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5804df6a-9783-4952-84e2-deb85ddc3133-combined-ca-bundle\") pod \"neutron-76c589f96d-p4cfs\" (UID: \"5804df6a-9783-4952-84e2-deb85ddc3133\") " pod="openstack/neutron-76c589f96d-p4cfs" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.700965 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/779e9f2f-2533-432a-9ee4-0adb27af5405-dns-svc\") pod \"dnsmasq-dns-b6c948c7-stf4x\" (UID: \"779e9f2f-2533-432a-9ee4-0adb27af5405\") " pod="openstack/dnsmasq-dns-b6c948c7-stf4x" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.704916 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/5804df6a-9783-4952-84e2-deb85ddc3133-config\") pod \"neutron-76c589f96d-p4cfs\" (UID: \"5804df6a-9783-4952-84e2-deb85ddc3133\") " pod="openstack/neutron-76c589f96d-p4cfs" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.705978 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5804df6a-9783-4952-84e2-deb85ddc3133-ovndb-tls-certs\") pod \"neutron-76c589f96d-p4cfs\" (UID: \"5804df6a-9783-4952-84e2-deb85ddc3133\") " pod="openstack/neutron-76c589f96d-p4cfs" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.718116 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d6s7c\" (UniqueName: \"kubernetes.io/projected/5804df6a-9783-4952-84e2-deb85ddc3133-kube-api-access-d6s7c\") pod \"neutron-76c589f96d-p4cfs\" (UID: \"5804df6a-9783-4952-84e2-deb85ddc3133\") " pod="openstack/neutron-76c589f96d-p4cfs" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.719744 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kgjsc\" (UniqueName: \"kubernetes.io/projected/779e9f2f-2533-432a-9ee4-0adb27af5405-kube-api-access-kgjsc\") pod \"dnsmasq-dns-b6c948c7-stf4x\" (UID: \"779e9f2f-2533-432a-9ee4-0adb27af5405\") " pod="openstack/dnsmasq-dns-b6c948c7-stf4x" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.776409 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b6c948c7-stf4x" Dec 05 17:51:27 crc kubenswrapper[4961]: I1205 17:51:27.891366 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-76c589f96d-p4cfs" Dec 05 17:51:28 crc kubenswrapper[4961]: I1205 17:51:28.253125 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8","Type":"ContainerStarted","Data":"1236fafbb9e2e5cc64b3b0cf733f5e0bcc978e52c4a17a565c5198715611d214"} Dec 05 17:51:28 crc kubenswrapper[4961]: I1205 17:51:28.256667 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-78bb69647d-95ptt" event={"ID":"a3dcddde-25f9-446a-8d5f-d9468cfa6940","Type":"ContainerStarted","Data":"a5746352cd4818f868af771254a7c446e98aa2c2a98a8574f4b98ff9ec4fef82"} Dec 05 17:51:28 crc kubenswrapper[4961]: I1205 17:51:28.263713 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-65bb59d746-cqlw9" event={"ID":"01eef206-68f4-4923-8253-2a130ba0dca3","Type":"ContainerStarted","Data":"01aff3bd688c5b14a84d39236485cd65f7bf7cd0d578accde815230603894791"} Dec 05 17:51:28 crc kubenswrapper[4961]: I1205 17:51:28.263762 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-65bb59d746-cqlw9" event={"ID":"01eef206-68f4-4923-8253-2a130ba0dca3","Type":"ContainerStarted","Data":"b962d0a3c530d2aeff022acebc2fd4c86c94ea227e97d35c58d1ba851a3abe96"} Dec 05 17:51:28 crc kubenswrapper[4961]: I1205 17:51:28.306015 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"792b07a4-55ec-4870-9e27-3b6e4d250b67","Type":"ContainerStarted","Data":"26653f35aae08d87656ae9a104b448a17c8dfaa8f58107e0edc9239f4a5ad9ad"} Dec 05 17:51:28 crc kubenswrapper[4961]: I1205 17:51:28.313873 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e533098a-ca28-487e-8471-7a426defda37","Type":"ContainerStarted","Data":"46b69b337ca601fde34503b19390d6ad98b19de661f26e6e300a54009cf3b64f"} Dec 05 17:51:28 crc kubenswrapper[4961]: I1205 17:51:28.333813 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-65bb59d746-cqlw9" podStartSLOduration=23.865847363 podStartE2EDuration="24.333792388s" podCreationTimestamp="2025-12-05 17:51:04 +0000 UTC" firstStartedPulling="2025-12-05 17:51:26.570789752 +0000 UTC m=+1092.631940225" lastFinishedPulling="2025-12-05 17:51:27.038734777 +0000 UTC m=+1093.099885250" observedRunningTime="2025-12-05 17:51:28.302288294 +0000 UTC m=+1094.363438767" watchObservedRunningTime="2025-12-05 17:51:28.333792388 +0000 UTC m=+1094.394942861" Dec 05 17:51:28 crc kubenswrapper[4961]: I1205 17:51:28.431199 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-b6c948c7-stf4x"] Dec 05 17:51:28 crc kubenswrapper[4961]: I1205 17:51:28.738845 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-76c589f96d-p4cfs"] Dec 05 17:51:28 crc kubenswrapper[4961]: W1205 17:51:28.757411 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5804df6a_9783_4952_84e2_deb85ddc3133.slice/crio-cec8eb27733a95f61ef452cb9c733dab2428e77501b98340feba1e197ddf4f82 WatchSource:0}: Error finding container cec8eb27733a95f61ef452cb9c733dab2428e77501b98340feba1e197ddf4f82: Status 404 returned error can't find the container with id cec8eb27733a95f61ef452cb9c733dab2428e77501b98340feba1e197ddf4f82 Dec 05 17:51:29 crc kubenswrapper[4961]: I1205 17:51:29.332051 4961 generic.go:334] "Generic (PLEG): container finished" 
podID="779e9f2f-2533-432a-9ee4-0adb27af5405" containerID="1a7dce6d8d15e10b4a6cec51ec6150ced46cc2fcbe74962819ddd51f5e03f164" exitCode=0 Dec 05 17:51:29 crc kubenswrapper[4961]: I1205 17:51:29.332296 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b6c948c7-stf4x" event={"ID":"779e9f2f-2533-432a-9ee4-0adb27af5405","Type":"ContainerDied","Data":"1a7dce6d8d15e10b4a6cec51ec6150ced46cc2fcbe74962819ddd51f5e03f164"} Dec 05 17:51:29 crc kubenswrapper[4961]: I1205 17:51:29.333432 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b6c948c7-stf4x" event={"ID":"779e9f2f-2533-432a-9ee4-0adb27af5405","Type":"ContainerStarted","Data":"553a79ab2e6b26e0df55d2a6b75f9459498a8f4c14f0f70236147e77d1b74b2b"} Dec 05 17:51:29 crc kubenswrapper[4961]: I1205 17:51:29.345140 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-9q2wd" event={"ID":"c3a753a7-88b4-4e0c-a1c6-82e79643c6b0","Type":"ContainerStarted","Data":"c40f9ceb45ddbcc3087afab86de2d6c09a8b2a30f24fde7d75109bb4222e2c3b"} Dec 05 17:51:29 crc kubenswrapper[4961]: I1205 17:51:29.357158 4961 generic.go:334] "Generic (PLEG): container finished" podID="125fc58a-f251-4b81-98e7-eca6a2c72a8e" containerID="4050ed7379c04d5387701f9805b61b386236cc4ac4ae79db61a625cadb152ff2" exitCode=0 Dec 05 17:51:29 crc kubenswrapper[4961]: I1205 17:51:29.357241 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-k7c2f" event={"ID":"125fc58a-f251-4b81-98e7-eca6a2c72a8e","Type":"ContainerDied","Data":"4050ed7379c04d5387701f9805b61b386236cc4ac4ae79db61a625cadb152ff2"} Dec 05 17:51:29 crc kubenswrapper[4961]: I1205 17:51:29.360114 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e533098a-ca28-487e-8471-7a426defda37","Type":"ContainerStarted","Data":"cb5c76fe8ad380608c2133cf2cb3d13943da935ae927e3dcf5bcc95c2ccc959f"} Dec 05 17:51:29 crc kubenswrapper[4961]: I1205 17:51:29.360149 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e533098a-ca28-487e-8471-7a426defda37","Type":"ContainerStarted","Data":"69b96dd48bc807f4892f5da38c6df499d9066f5d5e114634bc645580b8c8c665"} Dec 05 17:51:29 crc kubenswrapper[4961]: I1205 17:51:29.367691 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8","Type":"ContainerStarted","Data":"140e97691d78b8e3d30753328776e5fc9b2fb76022aa38ae6a0f7b0f6f7c298c"} Dec 05 17:51:29 crc kubenswrapper[4961]: I1205 17:51:29.390814 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-db-sync-9q2wd" podStartSLOduration=2.652725935 podStartE2EDuration="34.390760551s" podCreationTimestamp="2025-12-05 17:50:55 +0000 UTC" firstStartedPulling="2025-12-05 17:50:56.754390579 +0000 UTC m=+1062.815541052" lastFinishedPulling="2025-12-05 17:51:28.492425195 +0000 UTC m=+1094.553575668" observedRunningTime="2025-12-05 17:51:29.385490232 +0000 UTC m=+1095.446640705" watchObservedRunningTime="2025-12-05 17:51:29.390760551 +0000 UTC m=+1095.451911034" Dec 05 17:51:29 crc kubenswrapper[4961]: I1205 17:51:29.395740 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-78bb69647d-95ptt" event={"ID":"a3dcddde-25f9-446a-8d5f-d9468cfa6940","Type":"ContainerStarted","Data":"a163106435bdeaa7fc024d2400c30e7572a0ee592e66b3c46b09a7730ede8012"} Dec 05 17:51:29 crc kubenswrapper[4961]: I1205 17:51:29.473324 4961 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=14.473309468 podStartE2EDuration="14.473309468s" podCreationTimestamp="2025-12-05 17:51:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:51:29.43266133 +0000 UTC m=+1095.493811813" watchObservedRunningTime="2025-12-05 17:51:29.473309468 +0000 UTC m=+1095.534459941" Dec 05 17:51:29 crc kubenswrapper[4961]: I1205 17:51:29.473569 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76c589f96d-p4cfs" event={"ID":"5804df6a-9783-4952-84e2-deb85ddc3133","Type":"ContainerStarted","Data":"e1e14ad67340cee5387ebd515cf221545bcbd1b551f967e84f3df97acc62b0a6"} Dec 05 17:51:29 crc kubenswrapper[4961]: I1205 17:51:29.473598 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76c589f96d-p4cfs" event={"ID":"5804df6a-9783-4952-84e2-deb85ddc3133","Type":"ContainerStarted","Data":"29879b7015dfef1936bb009a898915748ff1af72baa26f5e66466acda2dd8339"} Dec 05 17:51:29 crc kubenswrapper[4961]: I1205 17:51:29.473607 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76c589f96d-p4cfs" event={"ID":"5804df6a-9783-4952-84e2-deb85ddc3133","Type":"ContainerStarted","Data":"cec8eb27733a95f61ef452cb9c733dab2428e77501b98340feba1e197ddf4f82"} Dec 05 17:51:29 crc kubenswrapper[4961]: I1205 17:51:29.474527 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-76c589f96d-p4cfs" Dec 05 17:51:29 crc kubenswrapper[4961]: I1205 17:51:29.493689 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"792b07a4-55ec-4870-9e27-3b6e4d250b67","Type":"ContainerStarted","Data":"eb34cea83c0ee346263a0b71ab34ca9f157690a05f117f19a676a5ef7ed49981"} Dec 05 17:51:29 crc kubenswrapper[4961]: I1205 17:51:29.496042 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/horizon-78bb69647d-95ptt" podStartSLOduration=25.025450768 podStartE2EDuration="25.496024117s" podCreationTimestamp="2025-12-05 17:51:04 +0000 UTC" firstStartedPulling="2025-12-05 17:51:26.570747561 +0000 UTC m=+1092.631898054" lastFinishedPulling="2025-12-05 17:51:27.04132093 +0000 UTC m=+1093.102471403" observedRunningTime="2025-12-05 17:51:29.473414651 +0000 UTC m=+1095.534565124" watchObservedRunningTime="2025-12-05 17:51:29.496024117 +0000 UTC m=+1095.557174590" Dec 05 17:51:29 crc kubenswrapper[4961]: I1205 17:51:29.503633 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-76c589f96d-p4cfs" podStartSLOduration=2.5036160929999998 podStartE2EDuration="2.503616093s" podCreationTimestamp="2025-12-05 17:51:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:51:29.502340172 +0000 UTC m=+1095.563490655" watchObservedRunningTime="2025-12-05 17:51:29.503616093 +0000 UTC m=+1095.564766556" Dec 05 17:51:29 crc kubenswrapper[4961]: I1205 17:51:29.541141 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=14.541115544 podStartE2EDuration="14.541115544s" podCreationTimestamp="2025-12-05 17:51:15 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" 
observedRunningTime="2025-12-05 17:51:29.524827734 +0000 UTC m=+1095.585978217" watchObservedRunningTime="2025-12-05 17:51:29.541115544 +0000 UTC m=+1095.602266037" Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.154664 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-88c564b55-ktjt8"] Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.156878 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-88c564b55-ktjt8" Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.160235 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-internal-svc" Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.166196 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-neutron-public-svc" Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.168109 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-88c564b55-ktjt8"] Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.243529 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/84b8d919-505e-44ba-b19a-532ec4df3533-httpd-config\") pod \"neutron-88c564b55-ktjt8\" (UID: \"84b8d919-505e-44ba-b19a-532ec4df3533\") " pod="openstack/neutron-88c564b55-ktjt8" Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.243617 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84b8d919-505e-44ba-b19a-532ec4df3533-combined-ca-bundle\") pod \"neutron-88c564b55-ktjt8\" (UID: \"84b8d919-505e-44ba-b19a-532ec4df3533\") " pod="openstack/neutron-88c564b55-ktjt8" Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.243753 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqflw\" (UniqueName: \"kubernetes.io/projected/84b8d919-505e-44ba-b19a-532ec4df3533-kube-api-access-xqflw\") pod \"neutron-88c564b55-ktjt8\" (UID: \"84b8d919-505e-44ba-b19a-532ec4df3533\") " pod="openstack/neutron-88c564b55-ktjt8" Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.243839 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/84b8d919-505e-44ba-b19a-532ec4df3533-public-tls-certs\") pod \"neutron-88c564b55-ktjt8\" (UID: \"84b8d919-505e-44ba-b19a-532ec4df3533\") " pod="openstack/neutron-88c564b55-ktjt8" Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.243919 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/84b8d919-505e-44ba-b19a-532ec4df3533-ovndb-tls-certs\") pod \"neutron-88c564b55-ktjt8\" (UID: \"84b8d919-505e-44ba-b19a-532ec4df3533\") " pod="openstack/neutron-88c564b55-ktjt8" Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.244001 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/84b8d919-505e-44ba-b19a-532ec4df3533-config\") pod \"neutron-88c564b55-ktjt8\" (UID: \"84b8d919-505e-44ba-b19a-532ec4df3533\") " pod="openstack/neutron-88c564b55-ktjt8" Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.244095 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/84b8d919-505e-44ba-b19a-532ec4df3533-internal-tls-certs\") pod \"neutron-88c564b55-ktjt8\" (UID: \"84b8d919-505e-44ba-b19a-532ec4df3533\") " pod="openstack/neutron-88c564b55-ktjt8" Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.345573 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/84b8d919-505e-44ba-b19a-532ec4df3533-httpd-config\") pod \"neutron-88c564b55-ktjt8\" (UID: \"84b8d919-505e-44ba-b19a-532ec4df3533\") " pod="openstack/neutron-88c564b55-ktjt8" Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.345617 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84b8d919-505e-44ba-b19a-532ec4df3533-combined-ca-bundle\") pod \"neutron-88c564b55-ktjt8\" (UID: \"84b8d919-505e-44ba-b19a-532ec4df3533\") " pod="openstack/neutron-88c564b55-ktjt8" Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.345660 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqflw\" (UniqueName: \"kubernetes.io/projected/84b8d919-505e-44ba-b19a-532ec4df3533-kube-api-access-xqflw\") pod \"neutron-88c564b55-ktjt8\" (UID: \"84b8d919-505e-44ba-b19a-532ec4df3533\") " pod="openstack/neutron-88c564b55-ktjt8" Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.345677 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/84b8d919-505e-44ba-b19a-532ec4df3533-public-tls-certs\") pod \"neutron-88c564b55-ktjt8\" (UID: \"84b8d919-505e-44ba-b19a-532ec4df3533\") " pod="openstack/neutron-88c564b55-ktjt8" Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.345702 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/84b8d919-505e-44ba-b19a-532ec4df3533-ovndb-tls-certs\") pod \"neutron-88c564b55-ktjt8\" (UID: \"84b8d919-505e-44ba-b19a-532ec4df3533\") " pod="openstack/neutron-88c564b55-ktjt8" Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.345729 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/84b8d919-505e-44ba-b19a-532ec4df3533-config\") pod \"neutron-88c564b55-ktjt8\" (UID: \"84b8d919-505e-44ba-b19a-532ec4df3533\") " pod="openstack/neutron-88c564b55-ktjt8" Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.345763 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/84b8d919-505e-44ba-b19a-532ec4df3533-internal-tls-certs\") pod \"neutron-88c564b55-ktjt8\" (UID: \"84b8d919-505e-44ba-b19a-532ec4df3533\") " pod="openstack/neutron-88c564b55-ktjt8" Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.351981 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/84b8d919-505e-44ba-b19a-532ec4df3533-public-tls-certs\") pod \"neutron-88c564b55-ktjt8\" (UID: \"84b8d919-505e-44ba-b19a-532ec4df3533\") " pod="openstack/neutron-88c564b55-ktjt8" Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.354719 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/84b8d919-505e-44ba-b19a-532ec4df3533-ovndb-tls-certs\") pod \"neutron-88c564b55-ktjt8\" (UID: 
\"84b8d919-505e-44ba-b19a-532ec4df3533\") " pod="openstack/neutron-88c564b55-ktjt8" Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.355062 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/84b8d919-505e-44ba-b19a-532ec4df3533-httpd-config\") pod \"neutron-88c564b55-ktjt8\" (UID: \"84b8d919-505e-44ba-b19a-532ec4df3533\") " pod="openstack/neutron-88c564b55-ktjt8" Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.355734 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/secret/84b8d919-505e-44ba-b19a-532ec4df3533-config\") pod \"neutron-88c564b55-ktjt8\" (UID: \"84b8d919-505e-44ba-b19a-532ec4df3533\") " pod="openstack/neutron-88c564b55-ktjt8" Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.356983 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/84b8d919-505e-44ba-b19a-532ec4df3533-combined-ca-bundle\") pod \"neutron-88c564b55-ktjt8\" (UID: \"84b8d919-505e-44ba-b19a-532ec4df3533\") " pod="openstack/neutron-88c564b55-ktjt8" Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.364624 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/84b8d919-505e-44ba-b19a-532ec4df3533-internal-tls-certs\") pod \"neutron-88c564b55-ktjt8\" (UID: \"84b8d919-505e-44ba-b19a-532ec4df3533\") " pod="openstack/neutron-88c564b55-ktjt8" Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.364693 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqflw\" (UniqueName: \"kubernetes.io/projected/84b8d919-505e-44ba-b19a-532ec4df3533-kube-api-access-xqflw\") pod \"neutron-88c564b55-ktjt8\" (UID: \"84b8d919-505e-44ba-b19a-532ec4df3533\") " pod="openstack/neutron-88c564b55-ktjt8" Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.488093 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-88c564b55-ktjt8" Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.503096 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b6c948c7-stf4x" event={"ID":"779e9f2f-2533-432a-9ee4-0adb27af5405","Type":"ContainerStarted","Data":"0a71f92debbc45d49290618060f351b47f1d47c8a587ae8116e9bfc2533e201b"} Dec 05 17:51:30 crc kubenswrapper[4961]: I1205 17:51:30.520871 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-b6c948c7-stf4x" podStartSLOduration=3.52085496 podStartE2EDuration="3.52085496s" podCreationTimestamp="2025-12-05 17:51:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:51:30.519528838 +0000 UTC m=+1096.580679311" watchObservedRunningTime="2025-12-05 17:51:30.52085496 +0000 UTC m=+1096.582005433" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.016066 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-db-sync-k7c2f" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.077794 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/125fc58a-f251-4b81-98e7-eca6a2c72a8e-scripts\") pod \"125fc58a-f251-4b81-98e7-eca6a2c72a8e\" (UID: \"125fc58a-f251-4b81-98e7-eca6a2c72a8e\") " Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.078188 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/125fc58a-f251-4b81-98e7-eca6a2c72a8e-config-data\") pod \"125fc58a-f251-4b81-98e7-eca6a2c72a8e\" (UID: \"125fc58a-f251-4b81-98e7-eca6a2c72a8e\") " Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.078307 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jnvd4\" (UniqueName: \"kubernetes.io/projected/125fc58a-f251-4b81-98e7-eca6a2c72a8e-kube-api-access-jnvd4\") pod \"125fc58a-f251-4b81-98e7-eca6a2c72a8e\" (UID: \"125fc58a-f251-4b81-98e7-eca6a2c72a8e\") " Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.078355 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/125fc58a-f251-4b81-98e7-eca6a2c72a8e-logs\") pod \"125fc58a-f251-4b81-98e7-eca6a2c72a8e\" (UID: \"125fc58a-f251-4b81-98e7-eca6a2c72a8e\") " Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.078418 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/125fc58a-f251-4b81-98e7-eca6a2c72a8e-combined-ca-bundle\") pod \"125fc58a-f251-4b81-98e7-eca6a2c72a8e\" (UID: \"125fc58a-f251-4b81-98e7-eca6a2c72a8e\") " Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.084234 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/125fc58a-f251-4b81-98e7-eca6a2c72a8e-logs" (OuterVolumeSpecName: "logs") pod "125fc58a-f251-4b81-98e7-eca6a2c72a8e" (UID: "125fc58a-f251-4b81-98e7-eca6a2c72a8e"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.104992 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/125fc58a-f251-4b81-98e7-eca6a2c72a8e-kube-api-access-jnvd4" (OuterVolumeSpecName: "kube-api-access-jnvd4") pod "125fc58a-f251-4b81-98e7-eca6a2c72a8e" (UID: "125fc58a-f251-4b81-98e7-eca6a2c72a8e"). InnerVolumeSpecName "kube-api-access-jnvd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.105123 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/125fc58a-f251-4b81-98e7-eca6a2c72a8e-scripts" (OuterVolumeSpecName: "scripts") pod "125fc58a-f251-4b81-98e7-eca6a2c72a8e" (UID: "125fc58a-f251-4b81-98e7-eca6a2c72a8e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.127458 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/125fc58a-f251-4b81-98e7-eca6a2c72a8e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "125fc58a-f251-4b81-98e7-eca6a2c72a8e" (UID: "125fc58a-f251-4b81-98e7-eca6a2c72a8e"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.171532 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/125fc58a-f251-4b81-98e7-eca6a2c72a8e-config-data" (OuterVolumeSpecName: "config-data") pod "125fc58a-f251-4b81-98e7-eca6a2c72a8e" (UID: "125fc58a-f251-4b81-98e7-eca6a2c72a8e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.180485 4961 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/125fc58a-f251-4b81-98e7-eca6a2c72a8e-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.180514 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/125fc58a-f251-4b81-98e7-eca6a2c72a8e-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.180524 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jnvd4\" (UniqueName: \"kubernetes.io/projected/125fc58a-f251-4b81-98e7-eca6a2c72a8e-kube-api-access-jnvd4\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.180534 4961 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/125fc58a-f251-4b81-98e7-eca6a2c72a8e-logs\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.180542 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/125fc58a-f251-4b81-98e7-eca6a2c72a8e-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.488949 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/placement-846ff7879b-wj44p"] Dec 05 17:51:31 crc kubenswrapper[4961]: E1205 17:51:31.489449 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="125fc58a-f251-4b81-98e7-eca6a2c72a8e" containerName="placement-db-sync" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.489464 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="125fc58a-f251-4b81-98e7-eca6a2c72a8e" containerName="placement-db-sync" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.489656 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="125fc58a-f251-4b81-98e7-eca6a2c72a8e" containerName="placement-db-sync" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.490553 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-846ff7879b-wj44p" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.495668 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-internal-svc" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.495841 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-placement-public-svc" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.522450 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-846ff7879b-wj44p"] Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.571012 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-db-sync-k7c2f" event={"ID":"125fc58a-f251-4b81-98e7-eca6a2c72a8e","Type":"ContainerDied","Data":"6237669b0ed6aa8e48afe57f7036ae3cb987b77acac964c204ba3c82c32c376d"} Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.571068 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/placement-db-sync-k7c2f" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.571088 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6237669b0ed6aa8e48afe57f7036ae3cb987b77acac964c204ba3c82c32c376d" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.573124 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-b6c948c7-stf4x" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.614306 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e625db8-8bfa-4a00-957c-8a31f781da4f-scripts\") pod \"placement-846ff7879b-wj44p\" (UID: \"9e625db8-8bfa-4a00-957c-8a31f781da4f\") " pod="openstack/placement-846ff7879b-wj44p" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.614361 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e625db8-8bfa-4a00-957c-8a31f781da4f-config-data\") pod \"placement-846ff7879b-wj44p\" (UID: \"9e625db8-8bfa-4a00-957c-8a31f781da4f\") " pod="openstack/placement-846ff7879b-wj44p" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.614394 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e625db8-8bfa-4a00-957c-8a31f781da4f-logs\") pod \"placement-846ff7879b-wj44p\" (UID: \"9e625db8-8bfa-4a00-957c-8a31f781da4f\") " pod="openstack/placement-846ff7879b-wj44p" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.614453 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e625db8-8bfa-4a00-957c-8a31f781da4f-public-tls-certs\") pod \"placement-846ff7879b-wj44p\" (UID: \"9e625db8-8bfa-4a00-957c-8a31f781da4f\") " pod="openstack/placement-846ff7879b-wj44p" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.614614 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e625db8-8bfa-4a00-957c-8a31f781da4f-internal-tls-certs\") pod \"placement-846ff7879b-wj44p\" (UID: \"9e625db8-8bfa-4a00-957c-8a31f781da4f\") " pod="openstack/placement-846ff7879b-wj44p" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.614633 4961 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e625db8-8bfa-4a00-957c-8a31f781da4f-combined-ca-bundle\") pod \"placement-846ff7879b-wj44p\" (UID: \"9e625db8-8bfa-4a00-957c-8a31f781da4f\") " pod="openstack/placement-846ff7879b-wj44p" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.614680 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b969s\" (UniqueName: \"kubernetes.io/projected/9e625db8-8bfa-4a00-957c-8a31f781da4f-kube-api-access-b969s\") pod \"placement-846ff7879b-wj44p\" (UID: \"9e625db8-8bfa-4a00-957c-8a31f781da4f\") " pod="openstack/placement-846ff7879b-wj44p" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.664657 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-88c564b55-ktjt8"] Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.718943 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e625db8-8bfa-4a00-957c-8a31f781da4f-internal-tls-certs\") pod \"placement-846ff7879b-wj44p\" (UID: \"9e625db8-8bfa-4a00-957c-8a31f781da4f\") " pod="openstack/placement-846ff7879b-wj44p" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.718997 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e625db8-8bfa-4a00-957c-8a31f781da4f-combined-ca-bundle\") pod \"placement-846ff7879b-wj44p\" (UID: \"9e625db8-8bfa-4a00-957c-8a31f781da4f\") " pod="openstack/placement-846ff7879b-wj44p" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.719050 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b969s\" (UniqueName: \"kubernetes.io/projected/9e625db8-8bfa-4a00-957c-8a31f781da4f-kube-api-access-b969s\") pod \"placement-846ff7879b-wj44p\" (UID: \"9e625db8-8bfa-4a00-957c-8a31f781da4f\") " pod="openstack/placement-846ff7879b-wj44p" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.719192 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e625db8-8bfa-4a00-957c-8a31f781da4f-scripts\") pod \"placement-846ff7879b-wj44p\" (UID: \"9e625db8-8bfa-4a00-957c-8a31f781da4f\") " pod="openstack/placement-846ff7879b-wj44p" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.719217 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e625db8-8bfa-4a00-957c-8a31f781da4f-config-data\") pod \"placement-846ff7879b-wj44p\" (UID: \"9e625db8-8bfa-4a00-957c-8a31f781da4f\") " pod="openstack/placement-846ff7879b-wj44p" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.719241 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e625db8-8bfa-4a00-957c-8a31f781da4f-logs\") pod \"placement-846ff7879b-wj44p\" (UID: \"9e625db8-8bfa-4a00-957c-8a31f781da4f\") " pod="openstack/placement-846ff7879b-wj44p" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.719288 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e625db8-8bfa-4a00-957c-8a31f781da4f-public-tls-certs\") pod \"placement-846ff7879b-wj44p\" (UID: \"9e625db8-8bfa-4a00-957c-8a31f781da4f\") " 
pod="openstack/placement-846ff7879b-wj44p" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.722584 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9e625db8-8bfa-4a00-957c-8a31f781da4f-logs\") pod \"placement-846ff7879b-wj44p\" (UID: \"9e625db8-8bfa-4a00-957c-8a31f781da4f\") " pod="openstack/placement-846ff7879b-wj44p" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.728842 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9e625db8-8bfa-4a00-957c-8a31f781da4f-combined-ca-bundle\") pod \"placement-846ff7879b-wj44p\" (UID: \"9e625db8-8bfa-4a00-957c-8a31f781da4f\") " pod="openstack/placement-846ff7879b-wj44p" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.729718 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e625db8-8bfa-4a00-957c-8a31f781da4f-internal-tls-certs\") pod \"placement-846ff7879b-wj44p\" (UID: \"9e625db8-8bfa-4a00-957c-8a31f781da4f\") " pod="openstack/placement-846ff7879b-wj44p" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.732144 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9e625db8-8bfa-4a00-957c-8a31f781da4f-scripts\") pod \"placement-846ff7879b-wj44p\" (UID: \"9e625db8-8bfa-4a00-957c-8a31f781da4f\") " pod="openstack/placement-846ff7879b-wj44p" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.733681 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9e625db8-8bfa-4a00-957c-8a31f781da4f-config-data\") pod \"placement-846ff7879b-wj44p\" (UID: \"9e625db8-8bfa-4a00-957c-8a31f781da4f\") " pod="openstack/placement-846ff7879b-wj44p" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.737744 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/9e625db8-8bfa-4a00-957c-8a31f781da4f-public-tls-certs\") pod \"placement-846ff7879b-wj44p\" (UID: \"9e625db8-8bfa-4a00-957c-8a31f781da4f\") " pod="openstack/placement-846ff7879b-wj44p" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.742614 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b969s\" (UniqueName: \"kubernetes.io/projected/9e625db8-8bfa-4a00-957c-8a31f781da4f-kube-api-access-b969s\") pod \"placement-846ff7879b-wj44p\" (UID: \"9e625db8-8bfa-4a00-957c-8a31f781da4f\") " pod="openstack/placement-846ff7879b-wj44p" Dec 05 17:51:31 crc kubenswrapper[4961]: I1205 17:51:31.830028 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/placement-846ff7879b-wj44p" Dec 05 17:51:32 crc kubenswrapper[4961]: I1205 17:51:32.565904 4961 generic.go:334] "Generic (PLEG): container finished" podID="2cdd7a2f-7aea-4443-877f-a10bfb0a8512" containerID="22ef03d3a529b02d14b357ac4db14950e604db2b339187b53fdc5824efc75dab" exitCode=0 Dec 05 17:51:32 crc kubenswrapper[4961]: I1205 17:51:32.565979 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vlqz4" event={"ID":"2cdd7a2f-7aea-4443-877f-a10bfb0a8512","Type":"ContainerDied","Data":"22ef03d3a529b02d14b357ac4db14950e604db2b339187b53fdc5824efc75dab"} Dec 05 17:51:32 crc kubenswrapper[4961]: I1205 17:51:32.576954 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e533098a-ca28-487e-8471-7a426defda37","Type":"ContainerStarted","Data":"c2752e7cb1c0b46d96a7a0ea8819f99321df760b913d96d3331173efab5599cf"} Dec 05 17:51:35 crc kubenswrapper[4961]: I1205 17:51:35.116707 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:51:35 crc kubenswrapper[4961]: I1205 17:51:35.117732 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:51:35 crc kubenswrapper[4961]: I1205 17:51:35.217710 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/horizon-78bb69647d-95ptt" Dec 05 17:51:35 crc kubenswrapper[4961]: I1205 17:51:35.218444 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-78bb69647d-95ptt" Dec 05 17:51:35 crc kubenswrapper[4961]: I1205 17:51:35.466049 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 17:51:35 crc kubenswrapper[4961]: I1205 17:51:35.466098 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 17:51:35 crc kubenswrapper[4961]: I1205 17:51:35.499604 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 17:51:35 crc kubenswrapper[4961]: I1205 17:51:35.504055 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 17:51:35 crc kubenswrapper[4961]: I1205 17:51:35.504088 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 17:51:35 crc kubenswrapper[4961]: I1205 17:51:35.527902 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 17:51:35 crc kubenswrapper[4961]: I1205 17:51:35.558652 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 17:51:35 crc kubenswrapper[4961]: I1205 17:51:35.562955 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 17:51:35 crc kubenswrapper[4961]: I1205 17:51:35.605217 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 17:51:35 crc kubenswrapper[4961]: I1205 17:51:35.605592 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 17:51:35 crc kubenswrapper[4961]: I1205 17:51:35.605703 4961 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 17:51:35 crc kubenswrapper[4961]: I1205 17:51:35.605872 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 17:51:37 crc kubenswrapper[4961]: I1205 17:51:37.628125 4961 generic.go:334] "Generic (PLEG): container finished" podID="c3a753a7-88b4-4e0c-a1c6-82e79643c6b0" containerID="c40f9ceb45ddbcc3087afab86de2d6c09a8b2a30f24fde7d75109bb4222e2c3b" exitCode=0 Dec 05 17:51:37 crc kubenswrapper[4961]: I1205 17:51:37.628206 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-9q2wd" event={"ID":"c3a753a7-88b4-4e0c-a1c6-82e79643c6b0","Type":"ContainerDied","Data":"c40f9ceb45ddbcc3087afab86de2d6c09a8b2a30f24fde7d75109bb4222e2c3b"} Dec 05 17:51:37 crc kubenswrapper[4961]: I1205 17:51:37.628416 4961 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 17:51:37 crc kubenswrapper[4961]: I1205 17:51:37.628426 4961 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 17:51:37 crc kubenswrapper[4961]: I1205 17:51:37.778004 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-b6c948c7-stf4x" Dec 05 17:51:37 crc kubenswrapper[4961]: I1205 17:51:37.833476 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56798b757f-lrbxr"] Dec 05 17:51:37 crc kubenswrapper[4961]: I1205 17:51:37.833732 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-56798b757f-lrbxr" podUID="884f8917-c720-4359-96d2-155b85958898" containerName="dnsmasq-dns" containerID="cri-o://d7f2b29c64bea537f4a7582ab2c06a54a6b8f245a050c48659ebc62b2e53b237" gracePeriod=10 Dec 05 17:51:37 crc kubenswrapper[4961]: I1205 17:51:37.923424 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 17:51:38 crc kubenswrapper[4961]: I1205 17:51:38.167537 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 17:51:38 crc kubenswrapper[4961]: I1205 17:51:38.638631 4961 generic.go:334] "Generic (PLEG): container finished" podID="884f8917-c720-4359-96d2-155b85958898" containerID="d7f2b29c64bea537f4a7582ab2c06a54a6b8f245a050c48659ebc62b2e53b237" exitCode=0 Dec 05 17:51:38 crc kubenswrapper[4961]: I1205 17:51:38.638685 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56798b757f-lrbxr" event={"ID":"884f8917-c720-4359-96d2-155b85958898","Type":"ContainerDied","Data":"d7f2b29c64bea537f4a7582ab2c06a54a6b8f245a050c48659ebc62b2e53b237"} Dec 05 17:51:38 crc kubenswrapper[4961]: I1205 17:51:38.655212 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 17:51:38 crc kubenswrapper[4961]: I1205 17:51:38.655349 4961 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 17:51:38 crc kubenswrapper[4961]: I1205 17:51:38.906759 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 17:51:39 crc kubenswrapper[4961]: W1205 17:51:39.698412 4961 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod84b8d919_505e_44ba_b19a_532ec4df3533.slice/crio-604d3c4a0a7cabc6c22b7772be2cf9a7088ea5092834ad596e827d7170baa9a6 WatchSource:0}: Error finding container 604d3c4a0a7cabc6c22b7772be2cf9a7088ea5092834ad596e827d7170baa9a6: Status 404 returned error can't find the container with id 604d3c4a0a7cabc6c22b7772be2cf9a7088ea5092834ad596e827d7170baa9a6 Dec 05 17:51:39 crc kubenswrapper[4961]: I1205 17:51:39.986870 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-bootstrap-vlqz4" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.080890 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-9q2wd" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.094479 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-combined-ca-bundle\") pod \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\" (UID: \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\") " Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.094554 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-28c9w\" (UniqueName: \"kubernetes.io/projected/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-kube-api-access-28c9w\") pod \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\" (UID: \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\") " Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.094589 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-config-data\") pod \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\" (UID: \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\") " Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.094640 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-fernet-keys\") pod \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\" (UID: \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\") " Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.094664 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-scripts\") pod \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\" (UID: \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\") " Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.094731 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-credential-keys\") pod \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\" (UID: \"2cdd7a2f-7aea-4443-877f-a10bfb0a8512\") " Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.103629 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-kube-api-access-28c9w" (OuterVolumeSpecName: "kube-api-access-28c9w") pod "2cdd7a2f-7aea-4443-877f-a10bfb0a8512" (UID: "2cdd7a2f-7aea-4443-877f-a10bfb0a8512"). InnerVolumeSpecName "kube-api-access-28c9w". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.105959 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "2cdd7a2f-7aea-4443-877f-a10bfb0a8512" (UID: "2cdd7a2f-7aea-4443-877f-a10bfb0a8512"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.109002 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "2cdd7a2f-7aea-4443-877f-a10bfb0a8512" (UID: "2cdd7a2f-7aea-4443-877f-a10bfb0a8512"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.112684 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-scripts" (OuterVolumeSpecName: "scripts") pod "2cdd7a2f-7aea-4443-877f-a10bfb0a8512" (UID: "2cdd7a2f-7aea-4443-877f-a10bfb0a8512"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.163916 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56798b757f-lrbxr" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.176813 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2cdd7a2f-7aea-4443-877f-a10bfb0a8512" (UID: "2cdd7a2f-7aea-4443-877f-a10bfb0a8512"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.196628 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c3a753a7-88b4-4e0c-a1c6-82e79643c6b0-db-sync-config-data\") pod \"c3a753a7-88b4-4e0c-a1c6-82e79643c6b0\" (UID: \"c3a753a7-88b4-4e0c-a1c6-82e79643c6b0\") " Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.196809 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3a753a7-88b4-4e0c-a1c6-82e79643c6b0-combined-ca-bundle\") pod \"c3a753a7-88b4-4e0c-a1c6-82e79643c6b0\" (UID: \"c3a753a7-88b4-4e0c-a1c6-82e79643c6b0\") " Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.196932 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8sfnr\" (UniqueName: \"kubernetes.io/projected/c3a753a7-88b4-4e0c-a1c6-82e79643c6b0-kube-api-access-8sfnr\") pod \"c3a753a7-88b4-4e0c-a1c6-82e79643c6b0\" (UID: \"c3a753a7-88b4-4e0c-a1c6-82e79643c6b0\") " Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.197271 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.197282 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-28c9w\" (UniqueName: \"kubernetes.io/projected/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-kube-api-access-28c9w\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.197294 4961 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-fernet-keys\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.197301 4961 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.197309 4961 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-credential-keys\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.204168 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3a753a7-88b4-4e0c-a1c6-82e79643c6b0-kube-api-access-8sfnr" (OuterVolumeSpecName: "kube-api-access-8sfnr") pod "c3a753a7-88b4-4e0c-a1c6-82e79643c6b0" (UID: "c3a753a7-88b4-4e0c-a1c6-82e79643c6b0"). InnerVolumeSpecName "kube-api-access-8sfnr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.213448 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3a753a7-88b4-4e0c-a1c6-82e79643c6b0-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "c3a753a7-88b4-4e0c-a1c6-82e79643c6b0" (UID: "c3a753a7-88b4-4e0c-a1c6-82e79643c6b0"). InnerVolumeSpecName "db-sync-config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.216944 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-config-data" (OuterVolumeSpecName: "config-data") pod "2cdd7a2f-7aea-4443-877f-a10bfb0a8512" (UID: "2cdd7a2f-7aea-4443-877f-a10bfb0a8512"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.263999 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c3a753a7-88b4-4e0c-a1c6-82e79643c6b0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c3a753a7-88b4-4e0c-a1c6-82e79643c6b0" (UID: "c3a753a7-88b4-4e0c-a1c6-82e79643c6b0"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.298533 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/884f8917-c720-4359-96d2-155b85958898-ovsdbserver-sb\") pod \"884f8917-c720-4359-96d2-155b85958898\" (UID: \"884f8917-c720-4359-96d2-155b85958898\") " Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.298646 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5c2pv\" (UniqueName: \"kubernetes.io/projected/884f8917-c720-4359-96d2-155b85958898-kube-api-access-5c2pv\") pod \"884f8917-c720-4359-96d2-155b85958898\" (UID: \"884f8917-c720-4359-96d2-155b85958898\") " Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.298687 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/884f8917-c720-4359-96d2-155b85958898-config\") pod \"884f8917-c720-4359-96d2-155b85958898\" (UID: \"884f8917-c720-4359-96d2-155b85958898\") " Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.298720 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/884f8917-c720-4359-96d2-155b85958898-dns-svc\") pod \"884f8917-c720-4359-96d2-155b85958898\" (UID: \"884f8917-c720-4359-96d2-155b85958898\") " Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.298760 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/884f8917-c720-4359-96d2-155b85958898-ovsdbserver-nb\") pod \"884f8917-c720-4359-96d2-155b85958898\" (UID: \"884f8917-c720-4359-96d2-155b85958898\") " Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.299246 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c3a753a7-88b4-4e0c-a1c6-82e79643c6b0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.299260 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8sfnr\" (UniqueName: \"kubernetes.io/projected/c3a753a7-88b4-4e0c-a1c6-82e79643c6b0-kube-api-access-8sfnr\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.299271 4961 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/c3a753a7-88b4-4e0c-a1c6-82e79643c6b0-db-sync-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:40 crc kubenswrapper[4961]: 
I1205 17:51:40.299279 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2cdd7a2f-7aea-4443-877f-a10bfb0a8512-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.317757 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/884f8917-c720-4359-96d2-155b85958898-kube-api-access-5c2pv" (OuterVolumeSpecName: "kube-api-access-5c2pv") pod "884f8917-c720-4359-96d2-155b85958898" (UID: "884f8917-c720-4359-96d2-155b85958898"). InnerVolumeSpecName "kube-api-access-5c2pv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.401407 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5c2pv\" (UniqueName: \"kubernetes.io/projected/884f8917-c720-4359-96d2-155b85958898-kube-api-access-5c2pv\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.525959 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/placement-846ff7879b-wj44p"] Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.530609 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/884f8917-c720-4359-96d2-155b85958898-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "884f8917-c720-4359-96d2-155b85958898" (UID: "884f8917-c720-4359-96d2-155b85958898"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.548336 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/884f8917-c720-4359-96d2-155b85958898-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "884f8917-c720-4359-96d2-155b85958898" (UID: "884f8917-c720-4359-96d2-155b85958898"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.560906 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/884f8917-c720-4359-96d2-155b85958898-config" (OuterVolumeSpecName: "config") pod "884f8917-c720-4359-96d2-155b85958898" (UID: "884f8917-c720-4359-96d2-155b85958898"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.583251 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/884f8917-c720-4359-96d2-155b85958898-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "884f8917-c720-4359-96d2-155b85958898" (UID: "884f8917-c720-4359-96d2-155b85958898"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.604751 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/884f8917-c720-4359-96d2-155b85958898-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.604789 4961 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/884f8917-c720-4359-96d2-155b85958898-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.604798 4961 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/884f8917-c720-4359-96d2-155b85958898-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.604807 4961 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/884f8917-c720-4359-96d2-155b85958898-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.693541 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-56798b757f-lrbxr" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.693589 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-56798b757f-lrbxr" event={"ID":"884f8917-c720-4359-96d2-155b85958898","Type":"ContainerDied","Data":"e287c5b40a6b0e6bd18f285ad3ab0d4c018bc142612edee8bd61823be4dc3925"} Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.694329 4961 scope.go:117] "RemoveContainer" containerID="d7f2b29c64bea537f4a7582ab2c06a54a6b8f245a050c48659ebc62b2e53b237" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.703166 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-db-sync-9q2wd" event={"ID":"c3a753a7-88b4-4e0c-a1c6-82e79643c6b0","Type":"ContainerDied","Data":"3e44a32dabac08d85e27e88dfffc20fb7fe3312f8764b7e8934127fb0b806bca"} Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.703214 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e44a32dabac08d85e27e88dfffc20fb7fe3312f8764b7e8934127fb0b806bca" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.703282 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-db-sync-9q2wd" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.719538 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-88c564b55-ktjt8" event={"ID":"84b8d919-505e-44ba-b19a-532ec4df3533","Type":"ContainerStarted","Data":"a6ed053efc2c3df2336c4a0d4d3106029bd0afb1abd9bcb6c432a12af46d4c1b"} Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.719678 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-88c564b55-ktjt8" event={"ID":"84b8d919-505e-44ba-b19a-532ec4df3533","Type":"ContainerStarted","Data":"604d3c4a0a7cabc6c22b7772be2cf9a7088ea5092834ad596e827d7170baa9a6"} Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.763268 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-846ff7879b-wj44p" event={"ID":"9e625db8-8bfa-4a00-957c-8a31f781da4f","Type":"ContainerStarted","Data":"bb8edbd847049afb97b069165781280c939f1b87b4877aacf7df664c625c2f84"} Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.770376 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-bootstrap-vlqz4" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.770898 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-bootstrap-vlqz4" event={"ID":"2cdd7a2f-7aea-4443-877f-a10bfb0a8512","Type":"ContainerDied","Data":"f7810d505f7c704a29c79b2888e38196dadd4304ea1048cf34b87e083116258f"} Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.770934 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f7810d505f7c704a29c79b2888e38196dadd4304ea1048cf34b87e083116258f" Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.814024 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e533098a-ca28-487e-8471-7a426defda37","Type":"ContainerStarted","Data":"5360aef4f45bac83d87a8ba09957a1cf7ae3889644065793074bf6627a867fe9"} Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.841752 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c30d6edf-8519-48cf-bcb0-f35c08e19a8b","Type":"ContainerStarted","Data":"95a0a635006377cba44f567ed9b400f892dedd06650de3cd25250742f3ccc4dd"} Dec 05 17:51:40 crc kubenswrapper[4961]: I1205 17:51:40.970531 4961 scope.go:117] "RemoveContainer" containerID="e0a8d9d02a351869eb72ad4349033b0b259564f9845d083d46b1cd4deb7271f2" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.040034 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-56798b757f-lrbxr"] Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.057855 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-56798b757f-lrbxr"] Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.158315 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-7896fbd4bd-l9rg6"] Dec 05 17:51:41 crc kubenswrapper[4961]: E1205 17:51:41.159031 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="884f8917-c720-4359-96d2-155b85958898" containerName="init" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.159049 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="884f8917-c720-4359-96d2-155b85958898" containerName="init" Dec 05 17:51:41 crc kubenswrapper[4961]: E1205 17:51:41.159073 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2cdd7a2f-7aea-4443-877f-a10bfb0a8512" containerName="keystone-bootstrap" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.159079 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="2cdd7a2f-7aea-4443-877f-a10bfb0a8512" containerName="keystone-bootstrap" Dec 05 17:51:41 crc kubenswrapper[4961]: E1205 17:51:41.159098 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="884f8917-c720-4359-96d2-155b85958898" containerName="dnsmasq-dns" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.159104 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="884f8917-c720-4359-96d2-155b85958898" containerName="dnsmasq-dns" Dec 05 17:51:41 crc kubenswrapper[4961]: E1205 17:51:41.159128 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3a753a7-88b4-4e0c-a1c6-82e79643c6b0" containerName="barbican-db-sync" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.159135 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3a753a7-88b4-4e0c-a1c6-82e79643c6b0" containerName="barbican-db-sync" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.159752 4961 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="2cdd7a2f-7aea-4443-877f-a10bfb0a8512" containerName="keystone-bootstrap" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.159805 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3a753a7-88b4-4e0c-a1c6-82e79643c6b0" containerName="barbican-db-sync" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.159824 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="884f8917-c720-4359-96d2-155b85958898" containerName="dnsmasq-dns" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.162871 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.167008 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-public-svc" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.167508 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-scripts" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.168797 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.175879 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-keystone-dockercfg-pp6pr" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.176124 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-keystone-internal-svc" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.176296 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"keystone-config-data" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.215059 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7896fbd4bd-l9rg6"] Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.351703 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v7pgp\" (UniqueName: \"kubernetes.io/projected/3e200718-485e-49b6-b4ab-8311a9178f66-kube-api-access-v7pgp\") pod \"keystone-7896fbd4bd-l9rg6\" (UID: \"3e200718-485e-49b6-b4ab-8311a9178f66\") " pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.352087 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3e200718-485e-49b6-b4ab-8311a9178f66-fernet-keys\") pod \"keystone-7896fbd4bd-l9rg6\" (UID: \"3e200718-485e-49b6-b4ab-8311a9178f66\") " pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.352116 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3e200718-485e-49b6-b4ab-8311a9178f66-credential-keys\") pod \"keystone-7896fbd4bd-l9rg6\" (UID: \"3e200718-485e-49b6-b4ab-8311a9178f66\") " pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.352156 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e200718-485e-49b6-b4ab-8311a9178f66-public-tls-certs\") pod \"keystone-7896fbd4bd-l9rg6\" (UID: \"3e200718-485e-49b6-b4ab-8311a9178f66\") " pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.352193 4961 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e200718-485e-49b6-b4ab-8311a9178f66-internal-tls-certs\") pod \"keystone-7896fbd4bd-l9rg6\" (UID: \"3e200718-485e-49b6-b4ab-8311a9178f66\") " pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.360981 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e200718-485e-49b6-b4ab-8311a9178f66-scripts\") pod \"keystone-7896fbd4bd-l9rg6\" (UID: \"3e200718-485e-49b6-b4ab-8311a9178f66\") " pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.361067 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e200718-485e-49b6-b4ab-8311a9178f66-combined-ca-bundle\") pod \"keystone-7896fbd4bd-l9rg6\" (UID: \"3e200718-485e-49b6-b4ab-8311a9178f66\") " pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.361292 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e200718-485e-49b6-b4ab-8311a9178f66-config-data\") pod \"keystone-7896fbd4bd-l9rg6\" (UID: \"3e200718-485e-49b6-b4ab-8311a9178f66\") " pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.425546 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-worker-56bf9fd9dc-6zsc7"] Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.436668 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-56bf9fd9dc-6zsc7" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.444219 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-config-data" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.444533 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-worker-config-data" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.445867 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-barbican-dockercfg-wmfbp" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.451204 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-keystone-listener-7c7dc5bc58-54zmv"] Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.463257 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1b291fd5-a251-45db-8c2b-334f43909f1f-config-data-custom\") pod \"barbican-worker-56bf9fd9dc-6zsc7\" (UID: \"1b291fd5-a251-45db-8c2b-334f43909f1f\") " pod="openstack/barbican-worker-56bf9fd9dc-6zsc7" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.463300 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e200718-485e-49b6-b4ab-8311a9178f66-config-data\") pod \"keystone-7896fbd4bd-l9rg6\" (UID: \"3e200718-485e-49b6-b4ab-8311a9178f66\") " pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.463337 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v7pgp\" (UniqueName: 
\"kubernetes.io/projected/3e200718-485e-49b6-b4ab-8311a9178f66-kube-api-access-v7pgp\") pod \"keystone-7896fbd4bd-l9rg6\" (UID: \"3e200718-485e-49b6-b4ab-8311a9178f66\") " pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.463354 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b291fd5-a251-45db-8c2b-334f43909f1f-config-data\") pod \"barbican-worker-56bf9fd9dc-6zsc7\" (UID: \"1b291fd5-a251-45db-8c2b-334f43909f1f\") " pod="openstack/barbican-worker-56bf9fd9dc-6zsc7" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.463369 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b291fd5-a251-45db-8c2b-334f43909f1f-logs\") pod \"barbican-worker-56bf9fd9dc-6zsc7\" (UID: \"1b291fd5-a251-45db-8c2b-334f43909f1f\") " pod="openstack/barbican-worker-56bf9fd9dc-6zsc7" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.463399 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3e200718-485e-49b6-b4ab-8311a9178f66-fernet-keys\") pod \"keystone-7896fbd4bd-l9rg6\" (UID: \"3e200718-485e-49b6-b4ab-8311a9178f66\") " pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.463421 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3e200718-485e-49b6-b4ab-8311a9178f66-credential-keys\") pod \"keystone-7896fbd4bd-l9rg6\" (UID: \"3e200718-485e-49b6-b4ab-8311a9178f66\") " pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.463452 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b291fd5-a251-45db-8c2b-334f43909f1f-combined-ca-bundle\") pod \"barbican-worker-56bf9fd9dc-6zsc7\" (UID: \"1b291fd5-a251-45db-8c2b-334f43909f1f\") " pod="openstack/barbican-worker-56bf9fd9dc-6zsc7" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.463469 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e200718-485e-49b6-b4ab-8311a9178f66-public-tls-certs\") pod \"keystone-7896fbd4bd-l9rg6\" (UID: \"3e200718-485e-49b6-b4ab-8311a9178f66\") " pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.463539 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6jlnb\" (UniqueName: \"kubernetes.io/projected/1b291fd5-a251-45db-8c2b-334f43909f1f-kube-api-access-6jlnb\") pod \"barbican-worker-56bf9fd9dc-6zsc7\" (UID: \"1b291fd5-a251-45db-8c2b-334f43909f1f\") " pod="openstack/barbican-worker-56bf9fd9dc-6zsc7" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.463572 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e200718-485e-49b6-b4ab-8311a9178f66-internal-tls-certs\") pod \"keystone-7896fbd4bd-l9rg6\" (UID: \"3e200718-485e-49b6-b4ab-8311a9178f66\") " pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.463591 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" 
(UniqueName: \"kubernetes.io/secret/3e200718-485e-49b6-b4ab-8311a9178f66-scripts\") pod \"keystone-7896fbd4bd-l9rg6\" (UID: \"3e200718-485e-49b6-b4ab-8311a9178f66\") " pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.463616 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e200718-485e-49b6-b4ab-8311a9178f66-combined-ca-bundle\") pod \"keystone-7896fbd4bd-l9rg6\" (UID: \"3e200718-485e-49b6-b4ab-8311a9178f66\") " pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.466456 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7c7dc5bc58-54zmv" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.468522 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-keystone-listener-config-data" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.483625 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/3e200718-485e-49b6-b4ab-8311a9178f66-fernet-keys\") pod \"keystone-7896fbd4bd-l9rg6\" (UID: \"3e200718-485e-49b6-b4ab-8311a9178f66\") " pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.498385 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-56bf9fd9dc-6zsc7"] Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.502366 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/3e200718-485e-49b6-b4ab-8311a9178f66-credential-keys\") pod \"keystone-7896fbd4bd-l9rg6\" (UID: \"3e200718-485e-49b6-b4ab-8311a9178f66\") " pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.502764 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3e200718-485e-49b6-b4ab-8311a9178f66-config-data\") pod \"keystone-7896fbd4bd-l9rg6\" (UID: \"3e200718-485e-49b6-b4ab-8311a9178f66\") " pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.520554 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3e200718-485e-49b6-b4ab-8311a9178f66-combined-ca-bundle\") pod \"keystone-7896fbd4bd-l9rg6\" (UID: \"3e200718-485e-49b6-b4ab-8311a9178f66\") " pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.530840 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7c7dc5bc58-54zmv"] Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.548879 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3e200718-485e-49b6-b4ab-8311a9178f66-scripts\") pod \"keystone-7896fbd4bd-l9rg6\" (UID: \"3e200718-485e-49b6-b4ab-8311a9178f66\") " pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.565207 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1b291fd5-a251-45db-8c2b-334f43909f1f-config-data-custom\") pod \"barbican-worker-56bf9fd9dc-6zsc7\" (UID: \"1b291fd5-a251-45db-8c2b-334f43909f1f\") " 
pod="openstack/barbican-worker-56bf9fd9dc-6zsc7" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.565259 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b291fd5-a251-45db-8c2b-334f43909f1f-config-data\") pod \"barbican-worker-56bf9fd9dc-6zsc7\" (UID: \"1b291fd5-a251-45db-8c2b-334f43909f1f\") " pod="openstack/barbican-worker-56bf9fd9dc-6zsc7" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.565276 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b291fd5-a251-45db-8c2b-334f43909f1f-logs\") pod \"barbican-worker-56bf9fd9dc-6zsc7\" (UID: \"1b291fd5-a251-45db-8c2b-334f43909f1f\") " pod="openstack/barbican-worker-56bf9fd9dc-6zsc7" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.565328 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b291fd5-a251-45db-8c2b-334f43909f1f-combined-ca-bundle\") pod \"barbican-worker-56bf9fd9dc-6zsc7\" (UID: \"1b291fd5-a251-45db-8c2b-334f43909f1f\") " pod="openstack/barbican-worker-56bf9fd9dc-6zsc7" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.565365 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6jlnb\" (UniqueName: \"kubernetes.io/projected/1b291fd5-a251-45db-8c2b-334f43909f1f-kube-api-access-6jlnb\") pod \"barbican-worker-56bf9fd9dc-6zsc7\" (UID: \"1b291fd5-a251-45db-8c2b-334f43909f1f\") " pod="openstack/barbican-worker-56bf9fd9dc-6zsc7" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.566211 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e200718-485e-49b6-b4ab-8311a9178f66-internal-tls-certs\") pod \"keystone-7896fbd4bd-l9rg6\" (UID: \"3e200718-485e-49b6-b4ab-8311a9178f66\") " pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.566313 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/1b291fd5-a251-45db-8c2b-334f43909f1f-logs\") pod \"barbican-worker-56bf9fd9dc-6zsc7\" (UID: \"1b291fd5-a251-45db-8c2b-334f43909f1f\") " pod="openstack/barbican-worker-56bf9fd9dc-6zsc7" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.567924 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v7pgp\" (UniqueName: \"kubernetes.io/projected/3e200718-485e-49b6-b4ab-8311a9178f66-kube-api-access-v7pgp\") pod \"keystone-7896fbd4bd-l9rg6\" (UID: \"3e200718-485e-49b6-b4ab-8311a9178f66\") " pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.568060 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-798d46d59c-mt8xr"] Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.570485 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/3e200718-485e-49b6-b4ab-8311a9178f66-public-tls-certs\") pod \"keystone-7896fbd4bd-l9rg6\" (UID: \"3e200718-485e-49b6-b4ab-8311a9178f66\") " pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.580755 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-798d46d59c-mt8xr" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.585861 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-798d46d59c-mt8xr"] Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.590638 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/1b291fd5-a251-45db-8c2b-334f43909f1f-config-data-custom\") pod \"barbican-worker-56bf9fd9dc-6zsc7\" (UID: \"1b291fd5-a251-45db-8c2b-334f43909f1f\") " pod="openstack/barbican-worker-56bf9fd9dc-6zsc7" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.598916 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1b291fd5-a251-45db-8c2b-334f43909f1f-combined-ca-bundle\") pod \"barbican-worker-56bf9fd9dc-6zsc7\" (UID: \"1b291fd5-a251-45db-8c2b-334f43909f1f\") " pod="openstack/barbican-worker-56bf9fd9dc-6zsc7" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.610992 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1b291fd5-a251-45db-8c2b-334f43909f1f-config-data\") pod \"barbican-worker-56bf9fd9dc-6zsc7\" (UID: \"1b291fd5-a251-45db-8c2b-334f43909f1f\") " pod="openstack/barbican-worker-56bf9fd9dc-6zsc7" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.611285 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6jlnb\" (UniqueName: \"kubernetes.io/projected/1b291fd5-a251-45db-8c2b-334f43909f1f-kube-api-access-6jlnb\") pod \"barbican-worker-56bf9fd9dc-6zsc7\" (UID: \"1b291fd5-a251-45db-8c2b-334f43909f1f\") " pod="openstack/barbican-worker-56bf9fd9dc-6zsc7" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.666728 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cw4rt\" (UniqueName: \"kubernetes.io/projected/9db816ec-0b58-40b0-a063-974df541802b-kube-api-access-cw4rt\") pod \"barbican-keystone-listener-7c7dc5bc58-54zmv\" (UID: \"9db816ec-0b58-40b0-a063-974df541802b\") " pod="openstack/barbican-keystone-listener-7c7dc5bc58-54zmv" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.666859 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9db816ec-0b58-40b0-a063-974df541802b-config-data\") pod \"barbican-keystone-listener-7c7dc5bc58-54zmv\" (UID: \"9db816ec-0b58-40b0-a063-974df541802b\") " pod="openstack/barbican-keystone-listener-7c7dc5bc58-54zmv" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.666909 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9db816ec-0b58-40b0-a063-974df541802b-logs\") pod \"barbican-keystone-listener-7c7dc5bc58-54zmv\" (UID: \"9db816ec-0b58-40b0-a063-974df541802b\") " pod="openstack/barbican-keystone-listener-7c7dc5bc58-54zmv" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.666960 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9db816ec-0b58-40b0-a063-974df541802b-config-data-custom\") pod \"barbican-keystone-listener-7c7dc5bc58-54zmv\" (UID: \"9db816ec-0b58-40b0-a063-974df541802b\") " pod="openstack/barbican-keystone-listener-7c7dc5bc58-54zmv" Dec 05 
17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.666987 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9db816ec-0b58-40b0-a063-974df541802b-combined-ca-bundle\") pod \"barbican-keystone-listener-7c7dc5bc58-54zmv\" (UID: \"9db816ec-0b58-40b0-a063-974df541802b\") " pod="openstack/barbican-keystone-listener-7c7dc5bc58-54zmv" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.768488 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cw4rt\" (UniqueName: \"kubernetes.io/projected/9db816ec-0b58-40b0-a063-974df541802b-kube-api-access-cw4rt\") pod \"barbican-keystone-listener-7c7dc5bc58-54zmv\" (UID: \"9db816ec-0b58-40b0-a063-974df541802b\") " pod="openstack/barbican-keystone-listener-7c7dc5bc58-54zmv" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.768551 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76026b95-0450-46e9-a72b-732687c4e833-dns-svc\") pod \"dnsmasq-dns-798d46d59c-mt8xr\" (UID: \"76026b95-0450-46e9-a72b-732687c4e833\") " pod="openstack/dnsmasq-dns-798d46d59c-mt8xr" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.768611 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9db816ec-0b58-40b0-a063-974df541802b-config-data\") pod \"barbican-keystone-listener-7c7dc5bc58-54zmv\" (UID: \"9db816ec-0b58-40b0-a063-974df541802b\") " pod="openstack/barbican-keystone-listener-7c7dc5bc58-54zmv" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.768630 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76026b95-0450-46e9-a72b-732687c4e833-ovsdbserver-nb\") pod \"dnsmasq-dns-798d46d59c-mt8xr\" (UID: \"76026b95-0450-46e9-a72b-732687c4e833\") " pod="openstack/dnsmasq-dns-798d46d59c-mt8xr" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.768659 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pbkm\" (UniqueName: \"kubernetes.io/projected/76026b95-0450-46e9-a72b-732687c4e833-kube-api-access-5pbkm\") pod \"dnsmasq-dns-798d46d59c-mt8xr\" (UID: \"76026b95-0450-46e9-a72b-732687c4e833\") " pod="openstack/dnsmasq-dns-798d46d59c-mt8xr" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.768681 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9db816ec-0b58-40b0-a063-974df541802b-logs\") pod \"barbican-keystone-listener-7c7dc5bc58-54zmv\" (UID: \"9db816ec-0b58-40b0-a063-974df541802b\") " pod="openstack/barbican-keystone-listener-7c7dc5bc58-54zmv" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.768718 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76026b95-0450-46e9-a72b-732687c4e833-ovsdbserver-sb\") pod \"dnsmasq-dns-798d46d59c-mt8xr\" (UID: \"76026b95-0450-46e9-a72b-732687c4e833\") " pod="openstack/dnsmasq-dns-798d46d59c-mt8xr" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.768737 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/9db816ec-0b58-40b0-a063-974df541802b-config-data-custom\") pod \"barbican-keystone-listener-7c7dc5bc58-54zmv\" (UID: \"9db816ec-0b58-40b0-a063-974df541802b\") " pod="openstack/barbican-keystone-listener-7c7dc5bc58-54zmv" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.768757 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9db816ec-0b58-40b0-a063-974df541802b-combined-ca-bundle\") pod \"barbican-keystone-listener-7c7dc5bc58-54zmv\" (UID: \"9db816ec-0b58-40b0-a063-974df541802b\") " pod="openstack/barbican-keystone-listener-7c7dc5bc58-54zmv" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.772821 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76026b95-0450-46e9-a72b-732687c4e833-config\") pod \"dnsmasq-dns-798d46d59c-mt8xr\" (UID: \"76026b95-0450-46e9-a72b-732687c4e833\") " pod="openstack/dnsmasq-dns-798d46d59c-mt8xr" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.773369 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/9db816ec-0b58-40b0-a063-974df541802b-logs\") pod \"barbican-keystone-listener-7c7dc5bc58-54zmv\" (UID: \"9db816ec-0b58-40b0-a063-974df541802b\") " pod="openstack/barbican-keystone-listener-7c7dc5bc58-54zmv" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.781279 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9db816ec-0b58-40b0-a063-974df541802b-combined-ca-bundle\") pod \"barbican-keystone-listener-7c7dc5bc58-54zmv\" (UID: \"9db816ec-0b58-40b0-a063-974df541802b\") " pod="openstack/barbican-keystone-listener-7c7dc5bc58-54zmv" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.781645 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-59df8b55c8-ff94g"] Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.787698 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-api-59df8b55c8-ff94g" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.794395 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9db816ec-0b58-40b0-a063-974df541802b-config-data-custom\") pod \"barbican-keystone-listener-7c7dc5bc58-54zmv\" (UID: \"9db816ec-0b58-40b0-a063-974df541802b\") " pod="openstack/barbican-keystone-listener-7c7dc5bc58-54zmv" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.796378 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"barbican-api-config-data" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.797692 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9db816ec-0b58-40b0-a063-974df541802b-config-data\") pod \"barbican-keystone-listener-7c7dc5bc58-54zmv\" (UID: \"9db816ec-0b58-40b0-a063-974df541802b\") " pod="openstack/barbican-keystone-listener-7c7dc5bc58-54zmv" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.805920 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-worker-56bf9fd9dc-6zsc7" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.835255 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.838040 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cw4rt\" (UniqueName: \"kubernetes.io/projected/9db816ec-0b58-40b0-a063-974df541802b-kube-api-access-cw4rt\") pod \"barbican-keystone-listener-7c7dc5bc58-54zmv\" (UID: \"9db816ec-0b58-40b0-a063-974df541802b\") " pod="openstack/barbican-keystone-listener-7c7dc5bc58-54zmv" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.847696 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-59df8b55c8-ff94g"] Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.851494 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/barbican-keystone-listener-7c7dc5bc58-54zmv" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.864302 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-88c564b55-ktjt8" event={"ID":"84b8d919-505e-44ba-b19a-532ec4df3533","Type":"ContainerStarted","Data":"da97cc1f303eac36579584417a8c96580dcf8c087f658a55cbf0213abdc4ac4d"} Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.865269 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/neutron-88c564b55-ktjt8" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.876678 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76026b95-0450-46e9-a72b-732687c4e833-ovsdbserver-sb\") pod \"dnsmasq-dns-798d46d59c-mt8xr\" (UID: \"76026b95-0450-46e9-a72b-732687c4e833\") " pod="openstack/dnsmasq-dns-798d46d59c-mt8xr" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.876752 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d65359f-708b-487d-8498-6f8537d5a443-logs\") pod \"barbican-api-59df8b55c8-ff94g\" (UID: \"0d65359f-708b-487d-8498-6f8537d5a443\") " pod="openstack/barbican-api-59df8b55c8-ff94g" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.876801 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76026b95-0450-46e9-a72b-732687c4e833-config\") pod \"dnsmasq-dns-798d46d59c-mt8xr\" (UID: \"76026b95-0450-46e9-a72b-732687c4e833\") " pod="openstack/dnsmasq-dns-798d46d59c-mt8xr" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.876845 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7cr2z\" (UniqueName: \"kubernetes.io/projected/0d65359f-708b-487d-8498-6f8537d5a443-kube-api-access-7cr2z\") pod \"barbican-api-59df8b55c8-ff94g\" (UID: \"0d65359f-708b-487d-8498-6f8537d5a443\") " pod="openstack/barbican-api-59df8b55c8-ff94g" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.876868 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76026b95-0450-46e9-a72b-732687c4e833-dns-svc\") pod \"dnsmasq-dns-798d46d59c-mt8xr\" (UID: \"76026b95-0450-46e9-a72b-732687c4e833\") " pod="openstack/dnsmasq-dns-798d46d59c-mt8xr" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.876886 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/0d65359f-708b-487d-8498-6f8537d5a443-config-data-custom\") pod \"barbican-api-59df8b55c8-ff94g\" (UID: \"0d65359f-708b-487d-8498-6f8537d5a443\") " pod="openstack/barbican-api-59df8b55c8-ff94g" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.876917 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d65359f-708b-487d-8498-6f8537d5a443-combined-ca-bundle\") pod \"barbican-api-59df8b55c8-ff94g\" (UID: \"0d65359f-708b-487d-8498-6f8537d5a443\") " pod="openstack/barbican-api-59df8b55c8-ff94g" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.876950 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76026b95-0450-46e9-a72b-732687c4e833-ovsdbserver-nb\") pod \"dnsmasq-dns-798d46d59c-mt8xr\" (UID: \"76026b95-0450-46e9-a72b-732687c4e833\") " pod="openstack/dnsmasq-dns-798d46d59c-mt8xr" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.876986 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pbkm\" (UniqueName: \"kubernetes.io/projected/76026b95-0450-46e9-a72b-732687c4e833-kube-api-access-5pbkm\") pod \"dnsmasq-dns-798d46d59c-mt8xr\" (UID: \"76026b95-0450-46e9-a72b-732687c4e833\") " pod="openstack/dnsmasq-dns-798d46d59c-mt8xr" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.877013 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d65359f-708b-487d-8498-6f8537d5a443-config-data\") pod \"barbican-api-59df8b55c8-ff94g\" (UID: \"0d65359f-708b-487d-8498-6f8537d5a443\") " pod="openstack/barbican-api-59df8b55c8-ff94g" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.877936 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76026b95-0450-46e9-a72b-732687c4e833-ovsdbserver-sb\") pod \"dnsmasq-dns-798d46d59c-mt8xr\" (UID: \"76026b95-0450-46e9-a72b-732687c4e833\") " pod="openstack/dnsmasq-dns-798d46d59c-mt8xr" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.878529 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76026b95-0450-46e9-a72b-732687c4e833-config\") pod \"dnsmasq-dns-798d46d59c-mt8xr\" (UID: \"76026b95-0450-46e9-a72b-732687c4e833\") " pod="openstack/dnsmasq-dns-798d46d59c-mt8xr" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.879163 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76026b95-0450-46e9-a72b-732687c4e833-dns-svc\") pod \"dnsmasq-dns-798d46d59c-mt8xr\" (UID: \"76026b95-0450-46e9-a72b-732687c4e833\") " pod="openstack/dnsmasq-dns-798d46d59c-mt8xr" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.881029 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76026b95-0450-46e9-a72b-732687c4e833-ovsdbserver-nb\") pod \"dnsmasq-dns-798d46d59c-mt8xr\" (UID: \"76026b95-0450-46e9-a72b-732687c4e833\") " pod="openstack/dnsmasq-dns-798d46d59c-mt8xr" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.901048 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-pvww2" 
event={"ID":"62cd878c-721b-46b4-87bb-1573a9fcf6d9","Type":"ContainerStarted","Data":"35f6ce998ecf0ac3757a2bb7201aba75818978a7420ffd58d7d2f9c5e93ed279"} Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.937106 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-846ff7879b-wj44p" event={"ID":"9e625db8-8bfa-4a00-957c-8a31f781da4f","Type":"ContainerStarted","Data":"a6b8d3bb2355a8d9cb835345336fc73062cd5c55ba9b7376b47afa4709f34c92"} Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.937601 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pbkm\" (UniqueName: \"kubernetes.io/projected/76026b95-0450-46e9-a72b-732687c4e833-kube-api-access-5pbkm\") pod \"dnsmasq-dns-798d46d59c-mt8xr\" (UID: \"76026b95-0450-46e9-a72b-732687c4e833\") " pod="openstack/dnsmasq-dns-798d46d59c-mt8xr" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.959300 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-798d46d59c-mt8xr" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.959964 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-88c564b55-ktjt8" podStartSLOduration=11.959946144 podStartE2EDuration="11.959946144s" podCreationTimestamp="2025-12-05 17:51:30 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:51:41.946683499 +0000 UTC m=+1108.007833972" watchObservedRunningTime="2025-12-05 17:51:41.959946144 +0000 UTC m=+1108.021096617" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.968595 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e533098a-ca28-487e-8471-7a426defda37","Type":"ContainerStarted","Data":"80bf2199b436dce13d9c5010c1c8b30ee232a463eb74c94d90f4a9f0365464de"} Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.968637 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e533098a-ca28-487e-8471-7a426defda37","Type":"ContainerStarted","Data":"2405d1eebaf26bf19157e3e708dfffc75913b0b9dd5757579f55a6e3896c43c2"} Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.978857 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d65359f-708b-487d-8498-6f8537d5a443-config-data\") pod \"barbican-api-59df8b55c8-ff94g\" (UID: \"0d65359f-708b-487d-8498-6f8537d5a443\") " pod="openstack/barbican-api-59df8b55c8-ff94g" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.978970 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d65359f-708b-487d-8498-6f8537d5a443-logs\") pod \"barbican-api-59df8b55c8-ff94g\" (UID: \"0d65359f-708b-487d-8498-6f8537d5a443\") " pod="openstack/barbican-api-59df8b55c8-ff94g" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.979095 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7cr2z\" (UniqueName: \"kubernetes.io/projected/0d65359f-708b-487d-8498-6f8537d5a443-kube-api-access-7cr2z\") pod \"barbican-api-59df8b55c8-ff94g\" (UID: \"0d65359f-708b-487d-8498-6f8537d5a443\") " pod="openstack/barbican-api-59df8b55c8-ff94g" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.979130 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/0d65359f-708b-487d-8498-6f8537d5a443-config-data-custom\") pod \"barbican-api-59df8b55c8-ff94g\" (UID: \"0d65359f-708b-487d-8498-6f8537d5a443\") " pod="openstack/barbican-api-59df8b55c8-ff94g" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.979229 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d65359f-708b-487d-8498-6f8537d5a443-combined-ca-bundle\") pod \"barbican-api-59df8b55c8-ff94g\" (UID: \"0d65359f-708b-487d-8498-6f8537d5a443\") " pod="openstack/barbican-api-59df8b55c8-ff94g" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.980684 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d65359f-708b-487d-8498-6f8537d5a443-logs\") pod \"barbican-api-59df8b55c8-ff94g\" (UID: \"0d65359f-708b-487d-8498-6f8537d5a443\") " pod="openstack/barbican-api-59df8b55c8-ff94g" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.990236 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0d65359f-708b-487d-8498-6f8537d5a443-config-data-custom\") pod \"barbican-api-59df8b55c8-ff94g\" (UID: \"0d65359f-708b-487d-8498-6f8537d5a443\") " pod="openstack/barbican-api-59df8b55c8-ff94g" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.993354 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d65359f-708b-487d-8498-6f8537d5a443-config-data\") pod \"barbican-api-59df8b55c8-ff94g\" (UID: \"0d65359f-708b-487d-8498-6f8537d5a443\") " pod="openstack/barbican-api-59df8b55c8-ff94g" Dec 05 17:51:41 crc kubenswrapper[4961]: I1205 17:51:41.994167 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d65359f-708b-487d-8498-6f8537d5a443-combined-ca-bundle\") pod \"barbican-api-59df8b55c8-ff94g\" (UID: \"0d65359f-708b-487d-8498-6f8537d5a443\") " pod="openstack/barbican-api-59df8b55c8-ff94g" Dec 05 17:51:42 crc kubenswrapper[4961]: I1205 17:51:42.004093 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7cr2z\" (UniqueName: \"kubernetes.io/projected/0d65359f-708b-487d-8498-6f8537d5a443-kube-api-access-7cr2z\") pod \"barbican-api-59df8b55c8-ff94g\" (UID: \"0d65359f-708b-487d-8498-6f8537d5a443\") " pod="openstack/barbican-api-59df8b55c8-ff94g" Dec 05 17:51:42 crc kubenswrapper[4961]: I1205 17:51:42.010622 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-db-sync-pvww2" podStartSLOduration=3.379175219 podStartE2EDuration="47.010598609s" podCreationTimestamp="2025-12-05 17:50:55 +0000 UTC" firstStartedPulling="2025-12-05 17:50:56.535196284 +0000 UTC m=+1062.596346757" lastFinishedPulling="2025-12-05 17:51:40.166619674 +0000 UTC m=+1106.227770147" observedRunningTime="2025-12-05 17:51:41.982038017 +0000 UTC m=+1108.043188490" watchObservedRunningTime="2025-12-05 17:51:42.010598609 +0000 UTC m=+1108.071749082" Dec 05 17:51:42 crc kubenswrapper[4961]: I1205 17:51:42.285509 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-59df8b55c8-ff94g" Dec 05 17:51:42 crc kubenswrapper[4961]: I1205 17:51:42.786401 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-worker-56bf9fd9dc-6zsc7"] Dec 05 17:51:42 crc kubenswrapper[4961]: I1205 17:51:42.819857 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-keystone-listener-7c7dc5bc58-54zmv"] Dec 05 17:51:42 crc kubenswrapper[4961]: I1205 17:51:42.901142 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="884f8917-c720-4359-96d2-155b85958898" path="/var/lib/kubelet/pods/884f8917-c720-4359-96d2-155b85958898/volumes" Dec 05 17:51:42 crc kubenswrapper[4961]: I1205 17:51:42.902123 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-7896fbd4bd-l9rg6"] Dec 05 17:51:42 crc kubenswrapper[4961]: I1205 17:51:42.951896 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-59df8b55c8-ff94g"] Dec 05 17:51:42 crc kubenswrapper[4961]: I1205 17:51:42.994255 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-56bf9fd9dc-6zsc7" event={"ID":"1b291fd5-a251-45db-8c2b-334f43909f1f","Type":"ContainerStarted","Data":"e32effe90291a30cfaf0219ddb356e6d0aaedc5a9e504a7754582f7afbfcc63a"} Dec 05 17:51:42 crc kubenswrapper[4961]: I1205 17:51:42.997038 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7c7dc5bc58-54zmv" event={"ID":"9db816ec-0b58-40b0-a063-974df541802b","Type":"ContainerStarted","Data":"7a9ee853fb53e55c6aad4928b2f6ef6e3e10a142a512d83bebf1ecc21fdd7741"} Dec 05 17:51:43 crc kubenswrapper[4961]: I1205 17:51:43.017906 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7896fbd4bd-l9rg6" event={"ID":"3e200718-485e-49b6-b4ab-8311a9178f66","Type":"ContainerStarted","Data":"d4b0c3f01b0e2f2e2b65edb24362469b30b664b7cd6a2d5437eaf8726cad771d"} Dec 05 17:51:43 crc kubenswrapper[4961]: I1205 17:51:43.032239 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-798d46d59c-mt8xr"] Dec 05 17:51:43 crc kubenswrapper[4961]: I1205 17:51:43.036579 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/placement-846ff7879b-wj44p" event={"ID":"9e625db8-8bfa-4a00-957c-8a31f781da4f","Type":"ContainerStarted","Data":"231979a2fd1e24105aeaa84d13a90344da19c6d074b2b8cc579eb30ad6c5d011"} Dec 05 17:51:43 crc kubenswrapper[4961]: I1205 17:51:43.036860 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-846ff7879b-wj44p" Dec 05 17:51:43 crc kubenswrapper[4961]: I1205 17:51:43.036909 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/placement-846ff7879b-wj44p" Dec 05 17:51:43 crc kubenswrapper[4961]: I1205 17:51:43.045812 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-59df8b55c8-ff94g" event={"ID":"0d65359f-708b-487d-8498-6f8537d5a443","Type":"ContainerStarted","Data":"e2516ff3539ba7a3de869cac5975202340c24114aa01e121f842645c839e17ba"} Dec 05 17:51:43 crc kubenswrapper[4961]: W1205 17:51:43.055444 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76026b95_0450_46e9_a72b_732687c4e833.slice/crio-77d21e1a4bd153e2c5dd02a8d398a3fee5cc749018710a66c476b0748e45e876 WatchSource:0}: Error finding container 77d21e1a4bd153e2c5dd02a8d398a3fee5cc749018710a66c476b0748e45e876: Status 404 returned error can't find 
the container with id 77d21e1a4bd153e2c5dd02a8d398a3fee5cc749018710a66c476b0748e45e876 Dec 05 17:51:43 crc kubenswrapper[4961]: I1205 17:51:43.071027 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/placement-846ff7879b-wj44p" podStartSLOduration=12.071008266 podStartE2EDuration="12.071008266s" podCreationTimestamp="2025-12-05 17:51:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:51:43.05853488 +0000 UTC m=+1109.119685363" watchObservedRunningTime="2025-12-05 17:51:43.071008266 +0000 UTC m=+1109.132158739" Dec 05 17:51:44 crc kubenswrapper[4961]: I1205 17:51:44.063033 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-7896fbd4bd-l9rg6" event={"ID":"3e200718-485e-49b6-b4ab-8311a9178f66","Type":"ContainerStarted","Data":"12fc1fbe85e288c7818bbe35b8d5be6096c293961b55469a48edf09f1b961f44"} Dec 05 17:51:44 crc kubenswrapper[4961]: I1205 17:51:44.063859 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:51:44 crc kubenswrapper[4961]: I1205 17:51:44.067356 4961 generic.go:334] "Generic (PLEG): container finished" podID="76026b95-0450-46e9-a72b-732687c4e833" containerID="57a9ab799af8f49402a87a457ad50f8304ce7552b75b0b3c3cf38843fbcd82bb" exitCode=0 Dec 05 17:51:44 crc kubenswrapper[4961]: I1205 17:51:44.067437 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-798d46d59c-mt8xr" event={"ID":"76026b95-0450-46e9-a72b-732687c4e833","Type":"ContainerDied","Data":"57a9ab799af8f49402a87a457ad50f8304ce7552b75b0b3c3cf38843fbcd82bb"} Dec 05 17:51:44 crc kubenswrapper[4961]: I1205 17:51:44.067468 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-798d46d59c-mt8xr" event={"ID":"76026b95-0450-46e9-a72b-732687c4e833","Type":"ContainerStarted","Data":"77d21e1a4bd153e2c5dd02a8d398a3fee5cc749018710a66c476b0748e45e876"} Dec 05 17:51:44 crc kubenswrapper[4961]: I1205 17:51:44.088065 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e533098a-ca28-487e-8471-7a426defda37","Type":"ContainerStarted","Data":"63fc1a72a718cac5d47452a631ab4564d021f08dfc6d3b93342d1f0858d01b60"} Dec 05 17:51:44 crc kubenswrapper[4961]: I1205 17:51:44.091826 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-7896fbd4bd-l9rg6" podStartSLOduration=3.09180013 podStartE2EDuration="3.09180013s" podCreationTimestamp="2025-12-05 17:51:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:51:44.090075248 +0000 UTC m=+1110.151225721" watchObservedRunningTime="2025-12-05 17:51:44.09180013 +0000 UTC m=+1110.152950603" Dec 05 17:51:44 crc kubenswrapper[4961]: I1205 17:51:44.113670 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-59df8b55c8-ff94g" event={"ID":"0d65359f-708b-487d-8498-6f8537d5a443","Type":"ContainerStarted","Data":"5720bba3c727018f5b87f03f054c3c0ed9cdc617ffbd5dfcc1e37236f64be613"} Dec 05 17:51:44 crc kubenswrapper[4961]: I1205 17:51:44.113710 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-59df8b55c8-ff94g" event={"ID":"0d65359f-708b-487d-8498-6f8537d5a443","Type":"ContainerStarted","Data":"afe906a198e095507e3a0a32303131350a8bcb595fa6b349ef2155d6a93d6984"} Dec 05 17:51:44 crc 
kubenswrapper[4961]: I1205 17:51:44.113722 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-59df8b55c8-ff94g" Dec 05 17:51:44 crc kubenswrapper[4961]: I1205 17:51:44.114138 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-59df8b55c8-ff94g" Dec 05 17:51:44 crc kubenswrapper[4961]: I1205 17:51:44.186928 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-59df8b55c8-ff94g" podStartSLOduration=3.186898776 podStartE2EDuration="3.186898776s" podCreationTimestamp="2025-12-05 17:51:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:51:44.162986179 +0000 UTC m=+1110.224136652" watchObservedRunningTime="2025-12-05 17:51:44.186898776 +0000 UTC m=+1110.248049269" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.118650 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-65bb59d746-cqlw9" podUID="01eef206-68f4-4923-8253-2a130ba0dca3" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.129152 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-798d46d59c-mt8xr" event={"ID":"76026b95-0450-46e9-a72b-732687c4e833","Type":"ContainerStarted","Data":"f96ede31ee9fd689ac163679d9aec96d2c027ced3f07afbfe808b07cd2c20afe"} Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.129205 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-798d46d59c-mt8xr" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.142157 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e533098a-ca28-487e-8471-7a426defda37","Type":"ContainerStarted","Data":"981cf619570c5383f552e3dd7345b66545223f562bd9f59ad9ec88875955a389"} Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.142192 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e533098a-ca28-487e-8471-7a426defda37","Type":"ContainerStarted","Data":"afbf6569bc21f4447a5a7adecd0810d3d97caf04dc0299a2a77243270b7a0c99"} Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.142203 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e533098a-ca28-487e-8471-7a426defda37","Type":"ContainerStarted","Data":"4030722c9bdb811e5a9b6adfd0decaea4333853cc3bee587f0362143296e990a"} Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.149421 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/barbican-api-5d86bc7b8-5vlfw"] Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.151738 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5d86bc7b8-5vlfw" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.153764 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-internal-svc" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.155426 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-barbican-public-svc" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.156753 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-798d46d59c-mt8xr" podStartSLOduration=4.156734839 podStartE2EDuration="4.156734839s" podCreationTimestamp="2025-12-05 17:51:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:51:45.150125567 +0000 UTC m=+1111.211276040" watchObservedRunningTime="2025-12-05 17:51:45.156734839 +0000 UTC m=+1111.217885312" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.175745 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5d86bc7b8-5vlfw"] Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.219447 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/horizon-78bb69647d-95ptt" podUID="a3dcddde-25f9-446a-8d5f-d9468cfa6940" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.146:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.146:8443: connect: connection refused" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.259270 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8daeb2b-9caa-4a76-b22a-e3320f0235a0-internal-tls-certs\") pod \"barbican-api-5d86bc7b8-5vlfw\" (UID: \"c8daeb2b-9caa-4a76-b22a-e3320f0235a0\") " pod="openstack/barbican-api-5d86bc7b8-5vlfw" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.259632 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-njq66\" (UniqueName: \"kubernetes.io/projected/c8daeb2b-9caa-4a76-b22a-e3320f0235a0-kube-api-access-njq66\") pod \"barbican-api-5d86bc7b8-5vlfw\" (UID: \"c8daeb2b-9caa-4a76-b22a-e3320f0235a0\") " pod="openstack/barbican-api-5d86bc7b8-5vlfw" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.259709 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8daeb2b-9caa-4a76-b22a-e3320f0235a0-public-tls-certs\") pod \"barbican-api-5d86bc7b8-5vlfw\" (UID: \"c8daeb2b-9caa-4a76-b22a-e3320f0235a0\") " pod="openstack/barbican-api-5d86bc7b8-5vlfw" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.259832 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c8daeb2b-9caa-4a76-b22a-e3320f0235a0-config-data-custom\") pod \"barbican-api-5d86bc7b8-5vlfw\" (UID: \"c8daeb2b-9caa-4a76-b22a-e3320f0235a0\") " pod="openstack/barbican-api-5d86bc7b8-5vlfw" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.259891 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8daeb2b-9caa-4a76-b22a-e3320f0235a0-config-data\") pod \"barbican-api-5d86bc7b8-5vlfw\" (UID: \"c8daeb2b-9caa-4a76-b22a-e3320f0235a0\") " 
pod="openstack/barbican-api-5d86bc7b8-5vlfw" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.259970 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8daeb2b-9caa-4a76-b22a-e3320f0235a0-combined-ca-bundle\") pod \"barbican-api-5d86bc7b8-5vlfw\" (UID: \"c8daeb2b-9caa-4a76-b22a-e3320f0235a0\") " pod="openstack/barbican-api-5d86bc7b8-5vlfw" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.260050 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c8daeb2b-9caa-4a76-b22a-e3320f0235a0-logs\") pod \"barbican-api-5d86bc7b8-5vlfw\" (UID: \"c8daeb2b-9caa-4a76-b22a-e3320f0235a0\") " pod="openstack/barbican-api-5d86bc7b8-5vlfw" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.362394 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-njq66\" (UniqueName: \"kubernetes.io/projected/c8daeb2b-9caa-4a76-b22a-e3320f0235a0-kube-api-access-njq66\") pod \"barbican-api-5d86bc7b8-5vlfw\" (UID: \"c8daeb2b-9caa-4a76-b22a-e3320f0235a0\") " pod="openstack/barbican-api-5d86bc7b8-5vlfw" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.362467 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8daeb2b-9caa-4a76-b22a-e3320f0235a0-public-tls-certs\") pod \"barbican-api-5d86bc7b8-5vlfw\" (UID: \"c8daeb2b-9caa-4a76-b22a-e3320f0235a0\") " pod="openstack/barbican-api-5d86bc7b8-5vlfw" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.362526 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c8daeb2b-9caa-4a76-b22a-e3320f0235a0-config-data-custom\") pod \"barbican-api-5d86bc7b8-5vlfw\" (UID: \"c8daeb2b-9caa-4a76-b22a-e3320f0235a0\") " pod="openstack/barbican-api-5d86bc7b8-5vlfw" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.362563 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8daeb2b-9caa-4a76-b22a-e3320f0235a0-config-data\") pod \"barbican-api-5d86bc7b8-5vlfw\" (UID: \"c8daeb2b-9caa-4a76-b22a-e3320f0235a0\") " pod="openstack/barbican-api-5d86bc7b8-5vlfw" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.362604 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8daeb2b-9caa-4a76-b22a-e3320f0235a0-combined-ca-bundle\") pod \"barbican-api-5d86bc7b8-5vlfw\" (UID: \"c8daeb2b-9caa-4a76-b22a-e3320f0235a0\") " pod="openstack/barbican-api-5d86bc7b8-5vlfw" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.362653 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c8daeb2b-9caa-4a76-b22a-e3320f0235a0-logs\") pod \"barbican-api-5d86bc7b8-5vlfw\" (UID: \"c8daeb2b-9caa-4a76-b22a-e3320f0235a0\") " pod="openstack/barbican-api-5d86bc7b8-5vlfw" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.362717 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8daeb2b-9caa-4a76-b22a-e3320f0235a0-internal-tls-certs\") pod \"barbican-api-5d86bc7b8-5vlfw\" (UID: \"c8daeb2b-9caa-4a76-b22a-e3320f0235a0\") " 
pod="openstack/barbican-api-5d86bc7b8-5vlfw" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.367585 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c8daeb2b-9caa-4a76-b22a-e3320f0235a0-logs\") pod \"barbican-api-5d86bc7b8-5vlfw\" (UID: \"c8daeb2b-9caa-4a76-b22a-e3320f0235a0\") " pod="openstack/barbican-api-5d86bc7b8-5vlfw" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.368834 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8daeb2b-9caa-4a76-b22a-e3320f0235a0-internal-tls-certs\") pod \"barbican-api-5d86bc7b8-5vlfw\" (UID: \"c8daeb2b-9caa-4a76-b22a-e3320f0235a0\") " pod="openstack/barbican-api-5d86bc7b8-5vlfw" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.370880 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/c8daeb2b-9caa-4a76-b22a-e3320f0235a0-public-tls-certs\") pod \"barbican-api-5d86bc7b8-5vlfw\" (UID: \"c8daeb2b-9caa-4a76-b22a-e3320f0235a0\") " pod="openstack/barbican-api-5d86bc7b8-5vlfw" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.371357 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c8daeb2b-9caa-4a76-b22a-e3320f0235a0-config-data-custom\") pod \"barbican-api-5d86bc7b8-5vlfw\" (UID: \"c8daeb2b-9caa-4a76-b22a-e3320f0235a0\") " pod="openstack/barbican-api-5d86bc7b8-5vlfw" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.376345 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c8daeb2b-9caa-4a76-b22a-e3320f0235a0-combined-ca-bundle\") pod \"barbican-api-5d86bc7b8-5vlfw\" (UID: \"c8daeb2b-9caa-4a76-b22a-e3320f0235a0\") " pod="openstack/barbican-api-5d86bc7b8-5vlfw" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.394808 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-njq66\" (UniqueName: \"kubernetes.io/projected/c8daeb2b-9caa-4a76-b22a-e3320f0235a0-kube-api-access-njq66\") pod \"barbican-api-5d86bc7b8-5vlfw\" (UID: \"c8daeb2b-9caa-4a76-b22a-e3320f0235a0\") " pod="openstack/barbican-api-5d86bc7b8-5vlfw" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.397013 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c8daeb2b-9caa-4a76-b22a-e3320f0235a0-config-data\") pod \"barbican-api-5d86bc7b8-5vlfw\" (UID: \"c8daeb2b-9caa-4a76-b22a-e3320f0235a0\") " pod="openstack/barbican-api-5d86bc7b8-5vlfw" Dec 05 17:51:45 crc kubenswrapper[4961]: I1205 17:51:45.476080 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-5d86bc7b8-5vlfw"
Dec 05 17:51:46 crc kubenswrapper[4961]: I1205 17:51:46.160110 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e533098a-ca28-487e-8471-7a426defda37","Type":"ContainerStarted","Data":"72ce56672bfeb05fbbb543be8193b3028b259a6734c39dd9a7642aacc50d0c8d"}
Dec 05 17:51:46 crc kubenswrapper[4961]: I1205 17:51:46.336570 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/barbican-api-5d86bc7b8-5vlfw"]
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.171833 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5d86bc7b8-5vlfw" event={"ID":"c8daeb2b-9caa-4a76-b22a-e3320f0235a0","Type":"ContainerStarted","Data":"f393227e73314f51ed1539effe4de962a41df64fb564ffbd7449b560cce40bdb"}
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.172074 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5d86bc7b8-5vlfw" event={"ID":"c8daeb2b-9caa-4a76-b22a-e3320f0235a0","Type":"ContainerStarted","Data":"dcef45a377a914fc2f1d79c62072d31101fa40657bdd8c9c7f5665bfefcc9201"}
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.172090 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-5d86bc7b8-5vlfw" event={"ID":"c8daeb2b-9caa-4a76-b22a-e3320f0235a0","Type":"ContainerStarted","Data":"29fa6fa452a2a8d4ad7c237de607c5199e0ece3cac05f42d66f85a92aef144f1"}
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.172206 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5d86bc7b8-5vlfw"
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.176286 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7c7dc5bc58-54zmv" event={"ID":"9db816ec-0b58-40b0-a063-974df541802b","Type":"ContainerStarted","Data":"748616b0a49ddd6aad5cb7c486886c0e803c076c28e21976d7c41e37ef525d65"}
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.176322 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-keystone-listener-7c7dc5bc58-54zmv" event={"ID":"9db816ec-0b58-40b0-a063-974df541802b","Type":"ContainerStarted","Data":"ff431ed0ab21fd987b7330567a788916b9f29c6b7594b8033b603e8ccade0052"}
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.192462 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e533098a-ca28-487e-8471-7a426defda37","Type":"ContainerStarted","Data":"eb4ba688a248d68597a76254152a3f7ac1e89c943ca7db452b1bbc148280fe31"}
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.192678 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-storage-0" event={"ID":"e533098a-ca28-487e-8471-7a426defda37","Type":"ContainerStarted","Data":"0d81d3fc9fcbcaec8fcf4cea5bce7b271c08daa8b65496d66f3dc3e8bd1271fe"}
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.195589 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-56bf9fd9dc-6zsc7" event={"ID":"1b291fd5-a251-45db-8c2b-334f43909f1f","Type":"ContainerStarted","Data":"5a3baf6a00f2c8feb7bb69c0203fd010cf86b8c849fefe928e0b3f14ad310138"}
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.195622 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-worker-56bf9fd9dc-6zsc7" event={"ID":"1b291fd5-a251-45db-8c2b-334f43909f1f","Type":"ContainerStarted","Data":"e62c7553795e1e061b4d58bb1736aa14e8f072e486b773cf118ec057e7709e32"}
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.198218 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-api-5d86bc7b8-5vlfw" podStartSLOduration=2.198202765 podStartE2EDuration="2.198202765s" podCreationTimestamp="2025-12-05 17:51:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:51:47.196459991 +0000 UTC m=+1113.257610464" watchObservedRunningTime="2025-12-05 17:51:47.198202765 +0000 UTC m=+1113.259353238"
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.235419 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-worker-56bf9fd9dc-6zsc7" podStartSLOduration=2.493310649 podStartE2EDuration="6.235398648s" podCreationTimestamp="2025-12-05 17:51:41 +0000 UTC" firstStartedPulling="2025-12-05 17:51:42.814733351 +0000 UTC m=+1108.875883824" lastFinishedPulling="2025-12-05 17:51:46.55682135 +0000 UTC m=+1112.617971823" observedRunningTime="2025-12-05 17:51:47.217287713 +0000 UTC m=+1113.278438206" watchObservedRunningTime="2025-12-05 17:51:47.235398648 +0000 UTC m=+1113.296549121"
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.251251 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/barbican-keystone-listener-7c7dc5bc58-54zmv" podStartSLOduration=3.308984176 podStartE2EDuration="6.251228907s" podCreationTimestamp="2025-12-05 17:51:41 +0000 UTC" firstStartedPulling="2025-12-05 17:51:42.873040044 +0000 UTC m=+1108.934190517" lastFinishedPulling="2025-12-05 17:51:45.815284775 +0000 UTC m=+1111.876435248" observedRunningTime="2025-12-05 17:51:47.238363651 +0000 UTC m=+1113.299514144" watchObservedRunningTime="2025-12-05 17:51:47.251228907 +0000 UTC m=+1113.312379380"
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.281727 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-storage-0" podStartSLOduration=94.426643962 podStartE2EDuration="1m52.281710515s" podCreationTimestamp="2025-12-05 17:49:55 +0000 UTC" firstStartedPulling="2025-12-05 17:51:25.668658193 +0000 UTC m=+1091.729808676" lastFinishedPulling="2025-12-05 17:51:43.523724756 +0000 UTC m=+1109.584875229" observedRunningTime="2025-12-05 17:51:47.277757349 +0000 UTC m=+1113.338907832" watchObservedRunningTime="2025-12-05 17:51:47.281710515 +0000 UTC m=+1113.342860988"
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.576964 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-798d46d59c-mt8xr"]
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.577186 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-798d46d59c-mt8xr" podUID="76026b95-0450-46e9-a72b-732687c4e833" containerName="dnsmasq-dns" containerID="cri-o://f96ede31ee9fd689ac163679d9aec96d2c027ced3f07afbfe808b07cd2c20afe" gracePeriod=10
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.617182 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-9wxdp"]
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.635638 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-9wxdp"
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.637794 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-9wxdp"]
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.666042 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"dns-swift-storage-0"
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.713676 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-9wxdp\" (UID: \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\") " pod="openstack/dnsmasq-dns-688c87cc99-9wxdp"
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.713752 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-dns-svc\") pod \"dnsmasq-dns-688c87cc99-9wxdp\" (UID: \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\") " pod="openstack/dnsmasq-dns-688c87cc99-9wxdp"
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.713813 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-988n2\" (UniqueName: \"kubernetes.io/projected/4a4e68e8-4547-458b-85bf-c898c9abcf2c-kube-api-access-988n2\") pod \"dnsmasq-dns-688c87cc99-9wxdp\" (UID: \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\") " pod="openstack/dnsmasq-dns-688c87cc99-9wxdp"
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.713874 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-9wxdp\" (UID: \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\") " pod="openstack/dnsmasq-dns-688c87cc99-9wxdp"
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.713920 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-9wxdp\" (UID: \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\") " pod="openstack/dnsmasq-dns-688c87cc99-9wxdp"
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.713940 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-config\") pod \"dnsmasq-dns-688c87cc99-9wxdp\" (UID: \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\") " pod="openstack/dnsmasq-dns-688c87cc99-9wxdp"
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.815762 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-988n2\" (UniqueName: \"kubernetes.io/projected/4a4e68e8-4547-458b-85bf-c898c9abcf2c-kube-api-access-988n2\") pod \"dnsmasq-dns-688c87cc99-9wxdp\" (UID: \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\") " pod="openstack/dnsmasq-dns-688c87cc99-9wxdp"
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.816244 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-9wxdp\" (UID: \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\") " pod="openstack/dnsmasq-dns-688c87cc99-9wxdp"
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.816294 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-9wxdp\" (UID: \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\") " pod="openstack/dnsmasq-dns-688c87cc99-9wxdp"
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.816314 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-config\") pod \"dnsmasq-dns-688c87cc99-9wxdp\" (UID: \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\") " pod="openstack/dnsmasq-dns-688c87cc99-9wxdp"
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.816383 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-9wxdp\" (UID: \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\") " pod="openstack/dnsmasq-dns-688c87cc99-9wxdp"
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.816411 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-dns-svc\") pod \"dnsmasq-dns-688c87cc99-9wxdp\" (UID: \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\") " pod="openstack/dnsmasq-dns-688c87cc99-9wxdp"
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.817430 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-dns-svc\") pod \"dnsmasq-dns-688c87cc99-9wxdp\" (UID: \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\") " pod="openstack/dnsmasq-dns-688c87cc99-9wxdp"
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.818387 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-ovsdbserver-nb\") pod \"dnsmasq-dns-688c87cc99-9wxdp\" (UID: \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\") " pod="openstack/dnsmasq-dns-688c87cc99-9wxdp"
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.819005 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-dns-swift-storage-0\") pod \"dnsmasq-dns-688c87cc99-9wxdp\" (UID: \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\") " pod="openstack/dnsmasq-dns-688c87cc99-9wxdp"
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.819894 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-config\") pod \"dnsmasq-dns-688c87cc99-9wxdp\" (UID: \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\") " pod="openstack/dnsmasq-dns-688c87cc99-9wxdp"
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.820631 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-ovsdbserver-sb\") pod \"dnsmasq-dns-688c87cc99-9wxdp\" (UID: \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\") " pod="openstack/dnsmasq-dns-688c87cc99-9wxdp"
Dec 05 17:51:47 crc kubenswrapper[4961]: I1205 17:51:47.840717 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-988n2\" (UniqueName: \"kubernetes.io/projected/4a4e68e8-4547-458b-85bf-c898c9abcf2c-kube-api-access-988n2\") pod \"dnsmasq-dns-688c87cc99-9wxdp\" (UID: \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\") " pod="openstack/dnsmasq-dns-688c87cc99-9wxdp"
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.037499 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-9wxdp"
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.176889 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-798d46d59c-mt8xr"
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.215688 4961 generic.go:334] "Generic (PLEG): container finished" podID="76026b95-0450-46e9-a72b-732687c4e833" containerID="f96ede31ee9fd689ac163679d9aec96d2c027ced3f07afbfe808b07cd2c20afe" exitCode=0
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.216724 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-798d46d59c-mt8xr"
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.217280 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-798d46d59c-mt8xr" event={"ID":"76026b95-0450-46e9-a72b-732687c4e833","Type":"ContainerDied","Data":"f96ede31ee9fd689ac163679d9aec96d2c027ced3f07afbfe808b07cd2c20afe"}
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.217313 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-798d46d59c-mt8xr" event={"ID":"76026b95-0450-46e9-a72b-732687c4e833","Type":"ContainerDied","Data":"77d21e1a4bd153e2c5dd02a8d398a3fee5cc749018710a66c476b0748e45e876"}
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.217337 4961 scope.go:117] "RemoveContainer" containerID="f96ede31ee9fd689ac163679d9aec96d2c027ced3f07afbfe808b07cd2c20afe"
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.218207 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/barbican-api-5d86bc7b8-5vlfw"
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.230357 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76026b95-0450-46e9-a72b-732687c4e833-ovsdbserver-sb\") pod \"76026b95-0450-46e9-a72b-732687c4e833\" (UID: \"76026b95-0450-46e9-a72b-732687c4e833\") "
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.230461 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76026b95-0450-46e9-a72b-732687c4e833-dns-svc\") pod \"76026b95-0450-46e9-a72b-732687c4e833\" (UID: \"76026b95-0450-46e9-a72b-732687c4e833\") "
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.230593 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5pbkm\" (UniqueName: \"kubernetes.io/projected/76026b95-0450-46e9-a72b-732687c4e833-kube-api-access-5pbkm\") pod \"76026b95-0450-46e9-a72b-732687c4e833\" (UID: \"76026b95-0450-46e9-a72b-732687c4e833\") "
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.230663 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76026b95-0450-46e9-a72b-732687c4e833-config\") pod \"76026b95-0450-46e9-a72b-732687c4e833\" (UID: \"76026b95-0450-46e9-a72b-732687c4e833\") "
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.230750 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76026b95-0450-46e9-a72b-732687c4e833-ovsdbserver-nb\") pod \"76026b95-0450-46e9-a72b-732687c4e833\" (UID: \"76026b95-0450-46e9-a72b-732687c4e833\") "
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.238298 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76026b95-0450-46e9-a72b-732687c4e833-kube-api-access-5pbkm" (OuterVolumeSpecName: "kube-api-access-5pbkm") pod "76026b95-0450-46e9-a72b-732687c4e833" (UID: "76026b95-0450-46e9-a72b-732687c4e833"). InnerVolumeSpecName "kube-api-access-5pbkm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.308497 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76026b95-0450-46e9-a72b-732687c4e833-config" (OuterVolumeSpecName: "config") pod "76026b95-0450-46e9-a72b-732687c4e833" (UID: "76026b95-0450-46e9-a72b-732687c4e833"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.311257 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76026b95-0450-46e9-a72b-732687c4e833-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "76026b95-0450-46e9-a72b-732687c4e833" (UID: "76026b95-0450-46e9-a72b-732687c4e833"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.314343 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76026b95-0450-46e9-a72b-732687c4e833-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "76026b95-0450-46e9-a72b-732687c4e833" (UID: "76026b95-0450-46e9-a72b-732687c4e833"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.333146 4961 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76026b95-0450-46e9-a72b-732687c4e833-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.333176 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5pbkm\" (UniqueName: \"kubernetes.io/projected/76026b95-0450-46e9-a72b-732687c4e833-kube-api-access-5pbkm\") on node \"crc\" DevicePath \"\""
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.333185 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76026b95-0450-46e9-a72b-732687c4e833-config\") on node \"crc\" DevicePath \"\""
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.333196 4961 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76026b95-0450-46e9-a72b-732687c4e833-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.355468 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76026b95-0450-46e9-a72b-732687c4e833-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "76026b95-0450-46e9-a72b-732687c4e833" (UID: "76026b95-0450-46e9-a72b-732687c4e833"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.378605 4961 scope.go:117] "RemoveContainer" containerID="57a9ab799af8f49402a87a457ad50f8304ce7552b75b0b3c3cf38843fbcd82bb"
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.427978 4961 scope.go:117] "RemoveContainer" containerID="f96ede31ee9fd689ac163679d9aec96d2c027ced3f07afbfe808b07cd2c20afe"
Dec 05 17:51:48 crc kubenswrapper[4961]: E1205 17:51:48.429139 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f96ede31ee9fd689ac163679d9aec96d2c027ced3f07afbfe808b07cd2c20afe\": container with ID starting with f96ede31ee9fd689ac163679d9aec96d2c027ced3f07afbfe808b07cd2c20afe not found: ID does not exist" containerID="f96ede31ee9fd689ac163679d9aec96d2c027ced3f07afbfe808b07cd2c20afe"
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.429202 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f96ede31ee9fd689ac163679d9aec96d2c027ced3f07afbfe808b07cd2c20afe"} err="failed to get container status \"f96ede31ee9fd689ac163679d9aec96d2c027ced3f07afbfe808b07cd2c20afe\": rpc error: code = NotFound desc = could not find container \"f96ede31ee9fd689ac163679d9aec96d2c027ced3f07afbfe808b07cd2c20afe\": container with ID starting with f96ede31ee9fd689ac163679d9aec96d2c027ced3f07afbfe808b07cd2c20afe not found: ID does not exist"
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.429230 4961 scope.go:117] "RemoveContainer" containerID="57a9ab799af8f49402a87a457ad50f8304ce7552b75b0b3c3cf38843fbcd82bb"
Dec 05 17:51:48 crc kubenswrapper[4961]: E1205 17:51:48.430363 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"57a9ab799af8f49402a87a457ad50f8304ce7552b75b0b3c3cf38843fbcd82bb\": container with ID starting with 57a9ab799af8f49402a87a457ad50f8304ce7552b75b0b3c3cf38843fbcd82bb not found: ID does not exist" containerID="57a9ab799af8f49402a87a457ad50f8304ce7552b75b0b3c3cf38843fbcd82bb"
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.430406 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"57a9ab799af8f49402a87a457ad50f8304ce7552b75b0b3c3cf38843fbcd82bb"} err="failed to get container status \"57a9ab799af8f49402a87a457ad50f8304ce7552b75b0b3c3cf38843fbcd82bb\": rpc error: code = NotFound desc = could not find container \"57a9ab799af8f49402a87a457ad50f8304ce7552b75b0b3c3cf38843fbcd82bb\": container with ID starting with 57a9ab799af8f49402a87a457ad50f8304ce7552b75b0b3c3cf38843fbcd82bb not found: ID does not exist"
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.436836 4961 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76026b95-0450-46e9-a72b-732687c4e833-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.443597 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-9wxdp"]
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.567148 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-798d46d59c-mt8xr"]
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.576620 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-798d46d59c-mt8xr"]
Dec 05 17:51:48 crc kubenswrapper[4961]: I1205 17:51:48.882367 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76026b95-0450-46e9-a72b-732687c4e833" path="/var/lib/kubelet/pods/76026b95-0450-46e9-a72b-732687c4e833/volumes"
Dec 05 17:51:49 crc kubenswrapper[4961]: I1205 17:51:49.241671 4961 generic.go:334] "Generic (PLEG): container finished" podID="62cd878c-721b-46b4-87bb-1573a9fcf6d9" containerID="35f6ce998ecf0ac3757a2bb7201aba75818978a7420ffd58d7d2f9c5e93ed279" exitCode=0
Dec 05 17:51:49 crc kubenswrapper[4961]: I1205 17:51:49.241726 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-pvww2" event={"ID":"62cd878c-721b-46b4-87bb-1573a9fcf6d9","Type":"ContainerDied","Data":"35f6ce998ecf0ac3757a2bb7201aba75818978a7420ffd58d7d2f9c5e93ed279"}
Dec 05 17:51:49 crc kubenswrapper[4961]: I1205 17:51:49.249403 4961 generic.go:334] "Generic (PLEG): container finished" podID="4a4e68e8-4547-458b-85bf-c898c9abcf2c" containerID="02df47551303860d22d86f61701e243f1f26c306b292602c04fb48b9414174e4" exitCode=0
Dec 05 17:51:49 crc kubenswrapper[4961]: I1205 17:51:49.249461 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-9wxdp" event={"ID":"4a4e68e8-4547-458b-85bf-c898c9abcf2c","Type":"ContainerDied","Data":"02df47551303860d22d86f61701e243f1f26c306b292602c04fb48b9414174e4"}
Dec 05 17:51:49 crc kubenswrapper[4961]: I1205 17:51:49.249491 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-9wxdp" event={"ID":"4a4e68e8-4547-458b-85bf-c898c9abcf2c","Type":"ContainerStarted","Data":"35c1ae7fe3b9672c7afdd685c13ff2c0135d76be245a9328f4b8cdec075515b3"}
Dec 05 17:51:53 crc kubenswrapper[4961]: I1205 17:51:53.719860 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-59df8b55c8-ff94g"
Dec 05 17:51:53 crc kubenswrapper[4961]: I1205 17:51:53.877012 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-59df8b55c8-ff94g"
Dec 05 17:51:54 crc kubenswrapper[4961]: I1205 17:51:54.536110 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-pvww2"
Dec 05 17:51:54 crc kubenswrapper[4961]: I1205 17:51:54.587745 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62cd878c-721b-46b4-87bb-1573a9fcf6d9-config-data\") pod \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\" (UID: \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\") "
Dec 05 17:51:54 crc kubenswrapper[4961]: I1205 17:51:54.587835 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62cd878c-721b-46b4-87bb-1573a9fcf6d9-combined-ca-bundle\") pod \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\" (UID: \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\") "
Dec 05 17:51:54 crc kubenswrapper[4961]: I1205 17:51:54.587987 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rjqbl\" (UniqueName: \"kubernetes.io/projected/62cd878c-721b-46b4-87bb-1573a9fcf6d9-kube-api-access-rjqbl\") pod \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\" (UID: \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\") "
Dec 05 17:51:54 crc kubenswrapper[4961]: I1205 17:51:54.588032 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/62cd878c-721b-46b4-87bb-1573a9fcf6d9-db-sync-config-data\") pod \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\" (UID: \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\") "
Dec 05 17:51:54 crc kubenswrapper[4961]: I1205 17:51:54.588082 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62cd878c-721b-46b4-87bb-1573a9fcf6d9-scripts\") pod \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\" (UID: \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\") "
Dec 05 17:51:54 crc kubenswrapper[4961]: I1205 17:51:54.588278 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/62cd878c-721b-46b4-87bb-1573a9fcf6d9-etc-machine-id\") pod \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\" (UID: \"62cd878c-721b-46b4-87bb-1573a9fcf6d9\") "
Dec 05 17:51:54 crc kubenswrapper[4961]: I1205 17:51:54.588868 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/62cd878c-721b-46b4-87bb-1573a9fcf6d9-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "62cd878c-721b-46b4-87bb-1573a9fcf6d9" (UID: "62cd878c-721b-46b4-87bb-1573a9fcf6d9"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 17:51:54 crc kubenswrapper[4961]: I1205 17:51:54.593739 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62cd878c-721b-46b4-87bb-1573a9fcf6d9-scripts" (OuterVolumeSpecName: "scripts") pod "62cd878c-721b-46b4-87bb-1573a9fcf6d9" (UID: "62cd878c-721b-46b4-87bb-1573a9fcf6d9"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:51:54 crc kubenswrapper[4961]: I1205 17:51:54.593834 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62cd878c-721b-46b4-87bb-1573a9fcf6d9-db-sync-config-data" (OuterVolumeSpecName: "db-sync-config-data") pod "62cd878c-721b-46b4-87bb-1573a9fcf6d9" (UID: "62cd878c-721b-46b4-87bb-1573a9fcf6d9"). InnerVolumeSpecName "db-sync-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:51:54 crc kubenswrapper[4961]: I1205 17:51:54.594491 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62cd878c-721b-46b4-87bb-1573a9fcf6d9-kube-api-access-rjqbl" (OuterVolumeSpecName: "kube-api-access-rjqbl") pod "62cd878c-721b-46b4-87bb-1573a9fcf6d9" (UID: "62cd878c-721b-46b4-87bb-1573a9fcf6d9"). InnerVolumeSpecName "kube-api-access-rjqbl". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:51:54 crc kubenswrapper[4961]: I1205 17:51:54.634820 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62cd878c-721b-46b4-87bb-1573a9fcf6d9-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "62cd878c-721b-46b4-87bb-1573a9fcf6d9" (UID: "62cd878c-721b-46b4-87bb-1573a9fcf6d9"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:51:54 crc kubenswrapper[4961]: I1205 17:51:54.637952 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/62cd878c-721b-46b4-87bb-1573a9fcf6d9-config-data" (OuterVolumeSpecName: "config-data") pod "62cd878c-721b-46b4-87bb-1573a9fcf6d9" (UID: "62cd878c-721b-46b4-87bb-1573a9fcf6d9"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:51:54 crc kubenswrapper[4961]: I1205 17:51:54.690309 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/62cd878c-721b-46b4-87bb-1573a9fcf6d9-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 17:51:54 crc kubenswrapper[4961]: I1205 17:51:54.690344 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/62cd878c-721b-46b4-87bb-1573a9fcf6d9-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 17:51:54 crc kubenswrapper[4961]: I1205 17:51:54.690360 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rjqbl\" (UniqueName: \"kubernetes.io/projected/62cd878c-721b-46b4-87bb-1573a9fcf6d9-kube-api-access-rjqbl\") on node \"crc\" DevicePath \"\""
Dec 05 17:51:54 crc kubenswrapper[4961]: I1205 17:51:54.690370 4961 reconciler_common.go:293] "Volume detached for volume \"db-sync-config-data\" (UniqueName: \"kubernetes.io/secret/62cd878c-721b-46b4-87bb-1573a9fcf6d9-db-sync-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 17:51:54 crc kubenswrapper[4961]: I1205 17:51:54.690381 4961 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/62cd878c-721b-46b4-87bb-1573a9fcf6d9-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 17:51:54 crc kubenswrapper[4961]: I1205 17:51:54.690389 4961 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/62cd878c-721b-46b4-87bb-1573a9fcf6d9-etc-machine-id\") on node \"crc\" DevicePath \"\""
Dec 05 17:51:55 crc kubenswrapper[4961]: I1205 17:51:55.330987 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-db-sync-pvww2" event={"ID":"62cd878c-721b-46b4-87bb-1573a9fcf6d9","Type":"ContainerDied","Data":"bef5a36b52934ba69bb40de54cdeab785fdb161760596ebae67ad96c35d76968"}
Dec 05 17:51:55 crc kubenswrapper[4961]: I1205 17:51:55.331483 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bef5a36b52934ba69bb40de54cdeab785fdb161760596ebae67ad96c35d76968"
Dec 05 17:51:55 crc kubenswrapper[4961]: I1205 17:51:55.331574 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-db-sync-pvww2"
Dec 05 17:51:55 crc kubenswrapper[4961]: E1205 17:51:55.539180 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceilometer-central-agent\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/ceilometer-0" podUID="c30d6edf-8519-48cf-bcb0-f35c08e19a8b"
Dec 05 17:51:55 crc kubenswrapper[4961]: I1205 17:51:55.875342 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 05 17:51:55 crc kubenswrapper[4961]: E1205 17:51:55.875831 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76026b95-0450-46e9-a72b-732687c4e833" containerName="init"
Dec 05 17:51:55 crc kubenswrapper[4961]: I1205 17:51:55.875848 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="76026b95-0450-46e9-a72b-732687c4e833" containerName="init"
Dec 05 17:51:55 crc kubenswrapper[4961]: E1205 17:51:55.875865 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62cd878c-721b-46b4-87bb-1573a9fcf6d9" containerName="cinder-db-sync"
Dec 05 17:51:55 crc kubenswrapper[4961]: I1205 17:51:55.875873 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="62cd878c-721b-46b4-87bb-1573a9fcf6d9" containerName="cinder-db-sync"
Dec 05 17:51:55 crc kubenswrapper[4961]: E1205 17:51:55.875885 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76026b95-0450-46e9-a72b-732687c4e833" containerName="dnsmasq-dns"
Dec 05 17:51:55 crc kubenswrapper[4961]: I1205 17:51:55.875894 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="76026b95-0450-46e9-a72b-732687c4e833" containerName="dnsmasq-dns"
Dec 05 17:51:55 crc kubenswrapper[4961]: I1205 17:51:55.876090 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="62cd878c-721b-46b4-87bb-1573a9fcf6d9" containerName="cinder-db-sync"
Dec 05 17:51:55 crc kubenswrapper[4961]: I1205 17:51:55.876107 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="76026b95-0450-46e9-a72b-732687c4e833" containerName="dnsmasq-dns"
Dec 05 17:51:55 crc kubenswrapper[4961]: I1205 17:51:55.877280 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Dec 05 17:51:55 crc kubenswrapper[4961]: I1205 17:51:55.882576 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-cinder-dockercfg-w6tdz"
Dec 05 17:51:55 crc kubenswrapper[4961]: I1205 17:51:55.883287 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data"
Dec 05 17:51:55 crc kubenswrapper[4961]: I1205 17:51:55.883402 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scripts"
Dec 05 17:51:55 crc kubenswrapper[4961]: I1205 17:51:55.883595 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-config-data"
Dec 05 17:51:55 crc kubenswrapper[4961]: I1205 17:51:55.895300 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 05 17:51:55 crc kubenswrapper[4961]: I1205 17:51:55.965282 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-9wxdp"]
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.031882 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-r59rl"]
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.033421 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.042596 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06e09cee-db01-4318-bc75-09ac7376772d-scripts\") pod \"cinder-scheduler-0\" (UID: \"06e09cee-db01-4318-bc75-09ac7376772d\") " pod="openstack/cinder-scheduler-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.042685 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-skl4x\" (UniqueName: \"kubernetes.io/projected/06e09cee-db01-4318-bc75-09ac7376772d-kube-api-access-skl4x\") pod \"cinder-scheduler-0\" (UID: \"06e09cee-db01-4318-bc75-09ac7376772d\") " pod="openstack/cinder-scheduler-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.042763 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/06e09cee-db01-4318-bc75-09ac7376772d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"06e09cee-db01-4318-bc75-09ac7376772d\") " pod="openstack/cinder-scheduler-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.042813 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/06e09cee-db01-4318-bc75-09ac7376772d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"06e09cee-db01-4318-bc75-09ac7376772d\") " pod="openstack/cinder-scheduler-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.042844 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06e09cee-db01-4318-bc75-09ac7376772d-config-data\") pod \"cinder-scheduler-0\" (UID: \"06e09cee-db01-4318-bc75-09ac7376772d\") " pod="openstack/cinder-scheduler-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.042938 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06e09cee-db01-4318-bc75-09ac7376772d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"06e09cee-db01-4318-bc75-09ac7376772d\") " pod="openstack/cinder-scheduler-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.057358 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-r59rl"]
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.146565 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7kd8g\" (UniqueName: \"kubernetes.io/projected/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-kube-api-access-7kd8g\") pod \"dnsmasq-dns-6bb4fc677f-r59rl\" (UID: \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\") " pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.146659 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/06e09cee-db01-4318-bc75-09ac7376772d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"06e09cee-db01-4318-bc75-09ac7376772d\") " pod="openstack/cinder-scheduler-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.146703 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/06e09cee-db01-4318-bc75-09ac7376772d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"06e09cee-db01-4318-bc75-09ac7376772d\") " pod="openstack/cinder-scheduler-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.146821 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-r59rl\" (UID: \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\") " pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.146839 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06e09cee-db01-4318-bc75-09ac7376772d-config-data\") pod \"cinder-scheduler-0\" (UID: \"06e09cee-db01-4318-bc75-09ac7376772d\") " pod="openstack/cinder-scheduler-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.146908 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-r59rl\" (UID: \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\") " pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.146957 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-r59rl\" (UID: \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\") " pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.146993 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06e09cee-db01-4318-bc75-09ac7376772d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"06e09cee-db01-4318-bc75-09ac7376772d\") " pod="openstack/cinder-scheduler-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.147045 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-config\") pod \"dnsmasq-dns-6bb4fc677f-r59rl\" (UID: \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\") " pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.147062 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-r59rl\" (UID: \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\") " pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.147115 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06e09cee-db01-4318-bc75-09ac7376772d-scripts\") pod \"cinder-scheduler-0\" (UID: \"06e09cee-db01-4318-bc75-09ac7376772d\") " pod="openstack/cinder-scheduler-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.147154 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-skl4x\" (UniqueName: \"kubernetes.io/projected/06e09cee-db01-4318-bc75-09ac7376772d-kube-api-access-skl4x\") pod \"cinder-scheduler-0\" (UID: \"06e09cee-db01-4318-bc75-09ac7376772d\") " pod="openstack/cinder-scheduler-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.150941 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/06e09cee-db01-4318-bc75-09ac7376772d-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"06e09cee-db01-4318-bc75-09ac7376772d\") " pod="openstack/cinder-scheduler-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.162491 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/06e09cee-db01-4318-bc75-09ac7376772d-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"06e09cee-db01-4318-bc75-09ac7376772d\") " pod="openstack/cinder-scheduler-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.163458 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06e09cee-db01-4318-bc75-09ac7376772d-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"06e09cee-db01-4318-bc75-09ac7376772d\") " pod="openstack/cinder-scheduler-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.163729 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06e09cee-db01-4318-bc75-09ac7376772d-config-data\") pod \"cinder-scheduler-0\" (UID: \"06e09cee-db01-4318-bc75-09ac7376772d\") " pod="openstack/cinder-scheduler-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.167049 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"]
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.167193 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06e09cee-db01-4318-bc75-09ac7376772d-scripts\") pod \"cinder-scheduler-0\" (UID: \"06e09cee-db01-4318-bc75-09ac7376772d\") " pod="openstack/cinder-scheduler-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.172243 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-skl4x\" (UniqueName: \"kubernetes.io/projected/06e09cee-db01-4318-bc75-09ac7376772d-kube-api-access-skl4x\") pod \"cinder-scheduler-0\" (UID: \"06e09cee-db01-4318-bc75-09ac7376772d\") " pod="openstack/cinder-scheduler-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.174735 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.180210 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.186272 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.217388 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.248527 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7kd8g\" (UniqueName: \"kubernetes.io/projected/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-kube-api-access-7kd8g\") pod \"dnsmasq-dns-6bb4fc677f-r59rl\" (UID: \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\") " pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.248581 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q2462\" (UniqueName: \"kubernetes.io/projected/b7e686d4-61f5-4479-b34f-e5492f1f0428-kube-api-access-q2462\") pod \"cinder-api-0\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " pod="openstack/cinder-api-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.248599 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7e686d4-61f5-4479-b34f-e5492f1f0428-scripts\") pod \"cinder-api-0\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " pod="openstack/cinder-api-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.248629 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-r59rl\" (UID: \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\") " pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.248664 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7e686d4-61f5-4479-b34f-e5492f1f0428-logs\") pod \"cinder-api-0\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " pod="openstack/cinder-api-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.248682 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7e686d4-61f5-4479-b34f-e5492f1f0428-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " pod="openstack/cinder-api-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.248704 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-r59rl\" (UID: \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\") " pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.248730 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-r59rl\" (UID: \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\") " pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.248760 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b7e686d4-61f5-4479-b34f-e5492f1f0428-config-data-custom\") pod \"cinder-api-0\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " pod="openstack/cinder-api-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.248804 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7e686d4-61f5-4479-b34f-e5492f1f0428-config-data\") pod \"cinder-api-0\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " pod="openstack/cinder-api-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.248846 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-config\") pod \"dnsmasq-dns-6bb4fc677f-r59rl\" (UID: \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\") " pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.248864 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-r59rl\" (UID: \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\") " pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.248906 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b7e686d4-61f5-4479-b34f-e5492f1f0428-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " pod="openstack/cinder-api-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.250001 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-ovsdbserver-nb\") pod \"dnsmasq-dns-6bb4fc677f-r59rl\" (UID: \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\") " pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.250510 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-dns-svc\") pod \"dnsmasq-dns-6bb4fc677f-r59rl\" (UID: \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\") " pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.251033 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-ovsdbserver-sb\") pod \"dnsmasq-dns-6bb4fc677f-r59rl\" (UID: \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\") " pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.251542 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-config\") pod \"dnsmasq-dns-6bb4fc677f-r59rl\" (UID: \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\") " pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.252760 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-dns-swift-storage-0\") pod \"dnsmasq-dns-6bb4fc677f-r59rl\" (UID: \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\") " pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.291413 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7kd8g\" (UniqueName: \"kubernetes.io/projected/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-kube-api-access-7kd8g\") pod \"dnsmasq-dns-6bb4fc677f-r59rl\" (UID: \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\") " pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.352398 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b7e686d4-61f5-4479-b34f-e5492f1f0428-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " pod="openstack/cinder-api-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.352489 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q2462\" (UniqueName: \"kubernetes.io/projected/b7e686d4-61f5-4479-b34f-e5492f1f0428-kube-api-access-q2462\") pod \"cinder-api-0\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " pod="openstack/cinder-api-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.352516 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7e686d4-61f5-4479-b34f-e5492f1f0428-scripts\") pod \"cinder-api-0\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " pod="openstack/cinder-api-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.352582 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7e686d4-61f5-4479-b34f-e5492f1f0428-logs\") pod \"cinder-api-0\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " pod="openstack/cinder-api-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.352612 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7e686d4-61f5-4479-b34f-e5492f1f0428-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " pod="openstack/cinder-api-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.352659 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b7e686d4-61f5-4479-b34f-e5492f1f0428-config-data-custom\") pod \"cinder-api-0\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " pod="openstack/cinder-api-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.352687 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7e686d4-61f5-4479-b34f-e5492f1f0428-config-data\") pod \"cinder-api-0\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " pod="openstack/cinder-api-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.353647 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7e686d4-61f5-4479-b34f-e5492f1f0428-logs\") pod \"cinder-api-0\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " pod="openstack/cinder-api-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.353729 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b7e686d4-61f5-4479-b34f-e5492f1f0428-etc-machine-id\") pod \"cinder-api-0\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " pod="openstack/cinder-api-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.360665 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b7e686d4-61f5-4479-b34f-e5492f1f0428-config-data-custom\") pod \"cinder-api-0\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " pod="openstack/cinder-api-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.361350 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7e686d4-61f5-4479-b34f-e5492f1f0428-scripts\") pod \"cinder-api-0\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " pod="openstack/cinder-api-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.366059 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7e686d4-61f5-4479-b34f-e5492f1f0428-config-data\") pod \"cinder-api-0\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " pod="openstack/cinder-api-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.366262 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7e686d4-61f5-4479-b34f-e5492f1f0428-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " pod="openstack/cinder-api-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.387298 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c30d6edf-8519-48cf-bcb0-f35c08e19a8b","Type":"ContainerStarted","Data":"eebbe11a2a2fc05c066b28acb1f8dc3ba86786b8e23240ca6f5ab3ebbe99fbcb"}
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.387362 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q2462\" (UniqueName: \"kubernetes.io/projected/b7e686d4-61f5-4479-b34f-e5492f1f0428-kube-api-access-q2462\") pod \"cinder-api-0\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " pod="openstack/cinder-api-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.387963 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c30d6edf-8519-48cf-bcb0-f35c08e19a8b" containerName="ceilometer-notification-agent" containerID="cri-o://91ba97da2e86065ff4b790a83ee68ce363090d49ee7585ccbe7f1ba79126fdad" gracePeriod=30
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.388066 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.388158 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c30d6edf-8519-48cf-bcb0-f35c08e19a8b" containerName="proxy-httpd" containerID="cri-o://eebbe11a2a2fc05c066b28acb1f8dc3ba86786b8e23240ca6f5ab3ebbe99fbcb" gracePeriod=30
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.388215 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="c30d6edf-8519-48cf-bcb0-f35c08e19a8b" containerName="sg-core" containerID="cri-o://95a0a635006377cba44f567ed9b400f892dedd06650de3cd25250742f3ccc4dd" gracePeriod=30
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.402940 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.403890 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.431250 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-9wxdp" event={"ID":"4a4e68e8-4547-458b-85bf-c898c9abcf2c","Type":"ContainerStarted","Data":"d9273eff1995f48b0a176ff9bb08f66a25488c754d61dbcb03aacdea62c66166"}
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.432383 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-688c87cc99-9wxdp"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.625564 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-688c87cc99-9wxdp" podStartSLOduration=9.625530412 podStartE2EDuration="9.625530412s" podCreationTimestamp="2025-12-05 17:51:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:51:56.475105917 +0000 UTC m=+1122.536256400" watchObservedRunningTime="2025-12-05 17:51:56.625530412 +0000 UTC m=+1122.686680885"
Dec 05 17:51:56 crc kubenswrapper[4961]: I1205 17:51:56.859885 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"]
Dec 05 17:51:57 crc kubenswrapper[4961]: I1205 17:51:57.070067 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"]
Dec 05 17:51:57 crc kubenswrapper[4961]: W1205 17:51:57.070717 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb7e686d4_61f5_4479_b34f_e5492f1f0428.slice/crio-0e534a4a71ff88975c8d06d24a7b9f764584cbac9b0e04c8d5b9458591707213 WatchSource:0}: Error finding container 0e534a4a71ff88975c8d06d24a7b9f764584cbac9b0e04c8d5b9458591707213: Status 404 returned error can't find the container with id 0e534a4a71ff88975c8d06d24a7b9f764584cbac9b0e04c8d5b9458591707213
Dec 05 17:51:57 crc kubenswrapper[4961]: I1205 17:51:57.216329 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-r59rl"]
Dec 05 17:51:57 crc kubenswrapper[4961]: W1205 17:51:57.221144 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod76453a2a_3b0c_40e3_8a89_f47adb86c0b0.slice/crio-e37cdc1f6cf706a3b9605acc9f6519d9da4f90bef2e42f90b5947f1100f07ac6 WatchSource:0}: Error finding container e37cdc1f6cf706a3b9605acc9f6519d9da4f90bef2e42f90b5947f1100f07ac6: Status 404 returned error can't find the container with id e37cdc1f6cf706a3b9605acc9f6519d9da4f90bef2e42f90b5947f1100f07ac6
Dec 05 17:51:57 crc kubenswrapper[4961]: I1205 17:51:57.456567 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b7e686d4-61f5-4479-b34f-e5492f1f0428","Type":"ContainerStarted","Data":"0e534a4a71ff88975c8d06d24a7b9f764584cbac9b0e04c8d5b9458591707213"}
Dec 05 17:51:57 crc kubenswrapper[4961]: I1205 17:51:57.459598 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl" event={"ID":"76453a2a-3b0c-40e3-8a89-f47adb86c0b0","Type":"ContainerStarted","Data":"e37cdc1f6cf706a3b9605acc9f6519d9da4f90bef2e42f90b5947f1100f07ac6"}
Dec 05 17:51:57 crc kubenswrapper[4961]: I1205 17:51:57.467607 4961 generic.go:334] "Generic (PLEG): container finished" podID="c30d6edf-8519-48cf-bcb0-f35c08e19a8b" containerID="eebbe11a2a2fc05c066b28acb1f8dc3ba86786b8e23240ca6f5ab3ebbe99fbcb" exitCode=0
Dec 05 17:51:57 crc kubenswrapper[4961]: I1205 17:51:57.467635 4961 generic.go:334] "Generic (PLEG): container finished" podID="c30d6edf-8519-48cf-bcb0-f35c08e19a8b" containerID="95a0a635006377cba44f567ed9b400f892dedd06650de3cd25250742f3ccc4dd" exitCode=2
Dec 05 17:51:57 crc kubenswrapper[4961]: I1205 17:51:57.467678 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c30d6edf-8519-48cf-bcb0-f35c08e19a8b","Type":"ContainerDied","Data":"eebbe11a2a2fc05c066b28acb1f8dc3ba86786b8e23240ca6f5ab3ebbe99fbcb"}
Dec 05 17:51:57 crc kubenswrapper[4961]: I1205 17:51:57.467728 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c30d6edf-8519-48cf-bcb0-f35c08e19a8b","Type":"ContainerDied","Data":"95a0a635006377cba44f567ed9b400f892dedd06650de3cd25250742f3ccc4dd"}
Dec 05 17:51:57 crc kubenswrapper[4961]: I1205 17:51:57.473295 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"06e09cee-db01-4318-bc75-09ac7376772d","Type":"ContainerStarted","Data":"ad8dda4ebcaac9a62bdac4d4440f778eb0718b26d38b074f40e9834e1989c625"}
Dec 05 17:51:57 crc kubenswrapper[4961]: I1205 17:51:57.473464 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-688c87cc99-9wxdp" podUID="4a4e68e8-4547-458b-85bf-c898c9abcf2c" containerName="dnsmasq-dns" containerID="cri-o://d9273eff1995f48b0a176ff9bb08f66a25488c754d61dbcb03aacdea62c66166" gracePeriod=10
Dec 05 17:51:57 crc kubenswrapper[4961]: I1205 17:51:57.711559 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-65bb59d746-cqlw9"
Dec 05 17:51:57 crc kubenswrapper[4961]: I1205 17:51:57.791282 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5d86bc7b8-5vlfw"
Dec 05 17:51:57 crc kubenswrapper[4961]: I1205 17:51:57.906308 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-76c589f96d-p4cfs"
Dec 05 17:51:58 crc kubenswrapper[4961]: I1205 17:51:58.029194 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/barbican-api-5d86bc7b8-5vlfw"
Dec 05 17:51:58 crc kubenswrapper[4961]: I1205 17:51:58.108711 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-59df8b55c8-ff94g"]
Dec 05 17:51:58 crc kubenswrapper[4961]: I1205 17:51:58.108978 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-59df8b55c8-ff94g" podUID="0d65359f-708b-487d-8498-6f8537d5a443" containerName="barbican-api-log" containerID="cri-o://afe906a198e095507e3a0a32303131350a8bcb595fa6b349ef2155d6a93d6984" gracePeriod=30
Dec 05 17:51:58 crc kubenswrapper[4961]: I1205 17:51:58.109581 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/barbican-api-59df8b55c8-ff94g" podUID="0d65359f-708b-487d-8498-6f8537d5a443" containerName="barbican-api" containerID="cri-o://5720bba3c727018f5b87f03f054c3c0ed9cdc617ffbd5dfcc1e37236f64be613" gracePeriod=30
Dec 05 17:51:58 crc kubenswrapper[4961]: I1205 17:51:58.515080 4961 generic.go:334] "Generic (PLEG): container finished" podID="0d65359f-708b-487d-8498-6f8537d5a443" containerID="afe906a198e095507e3a0a32303131350a8bcb595fa6b349ef2155d6a93d6984" exitCode=143
Dec 05 17:51:58 crc kubenswrapper[4961]: I1205 17:51:58.515123 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-59df8b55c8-ff94g" event={"ID":"0d65359f-708b-487d-8498-6f8537d5a443","Type":"ContainerDied","Data":"afe906a198e095507e3a0a32303131350a8bcb595fa6b349ef2155d6a93d6984"}
Dec 05 17:51:58 crc kubenswrapper[4961]: I1205 17:51:58.520523 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b7e686d4-61f5-4479-b34f-e5492f1f0428","Type":"ContainerStarted","Data":"2f64762eb29b69cd4b99f79514f58d663a745891e19fbdb3d0079a94be70e856"}
Dec 05 17:51:58 crc kubenswrapper[4961]: I1205 17:51:58.524843 4961 generic.go:334] "Generic (PLEG): container finished" podID="76453a2a-3b0c-40e3-8a89-f47adb86c0b0" containerID="fbad90c1bdc6ee4426529eabf4d9e67ae68e8e86a55e7940b4750bad9325b84a" exitCode=0
Dec 05 17:51:58 crc kubenswrapper[4961]: I1205 17:51:58.524994 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl" event={"ID":"76453a2a-3b0c-40e3-8a89-f47adb86c0b0","Type":"ContainerDied","Data":"fbad90c1bdc6ee4426529eabf4d9e67ae68e8e86a55e7940b4750bad9325b84a"}
Dec 05 17:51:58 crc kubenswrapper[4961]: I1205 17:51:58.541212 4961 generic.go:334] "Generic (PLEG): container finished" podID="4a4e68e8-4547-458b-85bf-c898c9abcf2c" containerID="d9273eff1995f48b0a176ff9bb08f66a25488c754d61dbcb03aacdea62c66166" exitCode=0
Dec 05 17:51:58 crc kubenswrapper[4961]: I1205 17:51:58.541518 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-9wxdp" event={"ID":"4a4e68e8-4547-458b-85bf-c898c9abcf2c","Type":"ContainerDied","Data":"d9273eff1995f48b0a176ff9bb08f66a25488c754d61dbcb03aacdea62c66166"}
Dec 05 17:51:58 crc kubenswrapper[4961]: I1205 17:51:58.570539 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/horizon-78bb69647d-95ptt"
Dec 05 17:51:58 crc kubenswrapper[4961]: I1205 17:51:58.810950 4961 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-9wxdp" Dec 05 17:51:58 crc kubenswrapper[4961]: I1205 17:51:58.943633 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-ovsdbserver-nb\") pod \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\" (UID: \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\") " Dec 05 17:51:58 crc kubenswrapper[4961]: I1205 17:51:58.943930 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-dns-swift-storage-0\") pod \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\" (UID: \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\") " Dec 05 17:51:58 crc kubenswrapper[4961]: I1205 17:51:58.943966 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-config\") pod \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\" (UID: \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\") " Dec 05 17:51:58 crc kubenswrapper[4961]: I1205 17:51:58.944103 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-ovsdbserver-sb\") pod \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\" (UID: \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\") " Dec 05 17:51:58 crc kubenswrapper[4961]: I1205 17:51:58.944173 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-988n2\" (UniqueName: \"kubernetes.io/projected/4a4e68e8-4547-458b-85bf-c898c9abcf2c-kube-api-access-988n2\") pod \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\" (UID: \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\") " Dec 05 17:51:58 crc kubenswrapper[4961]: I1205 17:51:58.944224 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-dns-svc\") pod \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\" (UID: \"4a4e68e8-4547-458b-85bf-c898c9abcf2c\") " Dec 05 17:51:58 crc kubenswrapper[4961]: I1205 17:51:58.981727 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 05 17:51:58 crc kubenswrapper[4961]: I1205 17:51:58.983925 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4a4e68e8-4547-458b-85bf-c898c9abcf2c-kube-api-access-988n2" (OuterVolumeSpecName: "kube-api-access-988n2") pod "4a4e68e8-4547-458b-85bf-c898c9abcf2c" (UID: "4a4e68e8-4547-458b-85bf-c898c9abcf2c"). InnerVolumeSpecName "kube-api-access-988n2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:51:59 crc kubenswrapper[4961]: I1205 17:51:59.049591 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-988n2\" (UniqueName: \"kubernetes.io/projected/4a4e68e8-4547-458b-85bf-c898c9abcf2c-kube-api-access-988n2\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:59 crc kubenswrapper[4961]: I1205 17:51:59.075556 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "4a4e68e8-4547-458b-85bf-c898c9abcf2c" (UID: "4a4e68e8-4547-458b-85bf-c898c9abcf2c"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:51:59 crc kubenswrapper[4961]: I1205 17:51:59.151422 4961 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:59 crc kubenswrapper[4961]: I1205 17:51:59.189911 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-config" (OuterVolumeSpecName: "config") pod "4a4e68e8-4547-458b-85bf-c898c9abcf2c" (UID: "4a4e68e8-4547-458b-85bf-c898c9abcf2c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:51:59 crc kubenswrapper[4961]: I1205 17:51:59.196505 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "4a4e68e8-4547-458b-85bf-c898c9abcf2c" (UID: "4a4e68e8-4547-458b-85bf-c898c9abcf2c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:51:59 crc kubenswrapper[4961]: I1205 17:51:59.208506 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "4a4e68e8-4547-458b-85bf-c898c9abcf2c" (UID: "4a4e68e8-4547-458b-85bf-c898c9abcf2c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:51:59 crc kubenswrapper[4961]: I1205 17:51:59.210392 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "4a4e68e8-4547-458b-85bf-c898c9abcf2c" (UID: "4a4e68e8-4547-458b-85bf-c898c9abcf2c"). InnerVolumeSpecName "dns-svc". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:51:59 crc kubenswrapper[4961]: I1205 17:51:59.253176 4961 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:59 crc kubenswrapper[4961]: I1205 17:51:59.253503 4961 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:59 crc kubenswrapper[4961]: I1205 17:51:59.253517 4961 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:59 crc kubenswrapper[4961]: I1205 17:51:59.253530 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4a4e68e8-4547-458b-85bf-c898c9abcf2c-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:51:59 crc kubenswrapper[4961]: I1205 17:51:59.556430 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"06e09cee-db01-4318-bc75-09ac7376772d","Type":"ContainerStarted","Data":"1b0539d4e5dc5e9b35611fb8dc89f2f878a67bdf7463a887cc0f83b975b0e2c1"} Dec 05 17:51:59 crc kubenswrapper[4961]: I1205 17:51:59.560257 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b7e686d4-61f5-4479-b34f-e5492f1f0428","Type":"ContainerStarted","Data":"35195394438ce7fa327d1f89c9f34a88888cfa01c66fdada1401de265ab0ee95"} Dec 05 17:51:59 crc kubenswrapper[4961]: I1205 17:51:59.560401 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 05 17:51:59 crc kubenswrapper[4961]: I1205 17:51:59.560470 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="b7e686d4-61f5-4479-b34f-e5492f1f0428" containerName="cinder-api" containerID="cri-o://35195394438ce7fa327d1f89c9f34a88888cfa01c66fdada1401de265ab0ee95" gracePeriod=30 Dec 05 17:51:59 crc kubenswrapper[4961]: I1205 17:51:59.560438 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-api-0" podUID="b7e686d4-61f5-4479-b34f-e5492f1f0428" containerName="cinder-api-log" containerID="cri-o://2f64762eb29b69cd4b99f79514f58d663a745891e19fbdb3d0079a94be70e856" gracePeriod=30 Dec 05 17:51:59 crc kubenswrapper[4961]: I1205 17:51:59.571712 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl" event={"ID":"76453a2a-3b0c-40e3-8a89-f47adb86c0b0","Type":"ContainerStarted","Data":"075befaa3821d8d3d16b09903923a00b8c7d3e1447c486fc67a827debff4174d"} Dec 05 17:51:59 crc kubenswrapper[4961]: I1205 17:51:59.571832 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl" Dec 05 17:51:59 crc kubenswrapper[4961]: I1205 17:51:59.574261 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-688c87cc99-9wxdp" event={"ID":"4a4e68e8-4547-458b-85bf-c898c9abcf2c","Type":"ContainerDied","Data":"35c1ae7fe3b9672c7afdd685c13ff2c0135d76be245a9328f4b8cdec075515b3"} Dec 05 17:51:59 crc kubenswrapper[4961]: I1205 17:51:59.574316 4961 scope.go:117] "RemoveContainer" containerID="d9273eff1995f48b0a176ff9bb08f66a25488c754d61dbcb03aacdea62c66166" Dec 05 
17:51:59 crc kubenswrapper[4961]: I1205 17:51:59.574454 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-688c87cc99-9wxdp" Dec 05 17:51:59 crc kubenswrapper[4961]: I1205 17:51:59.591315 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.591295682 podStartE2EDuration="3.591295682s" podCreationTimestamp="2025-12-05 17:51:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:51:59.587655422 +0000 UTC m=+1125.648805905" watchObservedRunningTime="2025-12-05 17:51:59.591295682 +0000 UTC m=+1125.652446155" Dec 05 17:51:59 crc kubenswrapper[4961]: I1205 17:51:59.627921 4961 scope.go:117] "RemoveContainer" containerID="02df47551303860d22d86f61701e243f1f26c306b292602c04fb48b9414174e4" Dec 05 17:51:59 crc kubenswrapper[4961]: I1205 17:51:59.629017 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl" podStartSLOduration=4.629004568 podStartE2EDuration="4.629004568s" podCreationTimestamp="2025-12-05 17:51:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:51:59.620217422 +0000 UTC m=+1125.681367905" watchObservedRunningTime="2025-12-05 17:51:59.629004568 +0000 UTC m=+1125.690155041" Dec 05 17:51:59 crc kubenswrapper[4961]: I1205 17:51:59.648455 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-9wxdp"] Dec 05 17:51:59 crc kubenswrapper[4961]: I1205 17:51:59.656886 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-688c87cc99-9wxdp"] Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.406854 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.431891 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.476510 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b7e686d4-61f5-4479-b34f-e5492f1f0428-config-data-custom\") pod \"b7e686d4-61f5-4479-b34f-e5492f1f0428\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.476608 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q2462\" (UniqueName: \"kubernetes.io/projected/b7e686d4-61f5-4479-b34f-e5492f1f0428-kube-api-access-q2462\") pod \"b7e686d4-61f5-4479-b34f-e5492f1f0428\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.476654 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7e686d4-61f5-4479-b34f-e5492f1f0428-logs\") pod \"b7e686d4-61f5-4479-b34f-e5492f1f0428\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.476698 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7e686d4-61f5-4479-b34f-e5492f1f0428-config-data\") pod \"b7e686d4-61f5-4479-b34f-e5492f1f0428\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.476812 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7e686d4-61f5-4479-b34f-e5492f1f0428-scripts\") pod \"b7e686d4-61f5-4479-b34f-e5492f1f0428\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.476892 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b7e686d4-61f5-4479-b34f-e5492f1f0428-etc-machine-id\") pod \"b7e686d4-61f5-4479-b34f-e5492f1f0428\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.476947 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7e686d4-61f5-4479-b34f-e5492f1f0428-combined-ca-bundle\") pod \"b7e686d4-61f5-4479-b34f-e5492f1f0428\" (UID: \"b7e686d4-61f5-4479-b34f-e5492f1f0428\") " Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.480092 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b7e686d4-61f5-4479-b34f-e5492f1f0428-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "b7e686d4-61f5-4479-b34f-e5492f1f0428" (UID: "b7e686d4-61f5-4479-b34f-e5492f1f0428"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.480550 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b7e686d4-61f5-4479-b34f-e5492f1f0428-logs" (OuterVolumeSpecName: "logs") pod "b7e686d4-61f5-4479-b34f-e5492f1f0428" (UID: "b7e686d4-61f5-4479-b34f-e5492f1f0428"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.494519 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7e686d4-61f5-4479-b34f-e5492f1f0428-scripts" (OuterVolumeSpecName: "scripts") pod "b7e686d4-61f5-4479-b34f-e5492f1f0428" (UID: "b7e686d4-61f5-4479-b34f-e5492f1f0428"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.506503 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7e686d4-61f5-4479-b34f-e5492f1f0428-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b7e686d4-61f5-4479-b34f-e5492f1f0428" (UID: "b7e686d4-61f5-4479-b34f-e5492f1f0428"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.524231 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b7e686d4-61f5-4479-b34f-e5492f1f0428-kube-api-access-q2462" (OuterVolumeSpecName: "kube-api-access-q2462") pod "b7e686d4-61f5-4479-b34f-e5492f1f0428" (UID: "b7e686d4-61f5-4479-b34f-e5492f1f0428"). InnerVolumeSpecName "kube-api-access-q2462". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.524281 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/neutron-88c564b55-ktjt8" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.540157 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7e686d4-61f5-4479-b34f-e5492f1f0428-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b7e686d4-61f5-4479-b34f-e5492f1f0428" (UID: "b7e686d4-61f5-4479-b34f-e5492f1f0428"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.560558 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b7e686d4-61f5-4479-b34f-e5492f1f0428-config-data" (OuterVolumeSpecName: "config-data") pod "b7e686d4-61f5-4479-b34f-e5492f1f0428" (UID: "b7e686d4-61f5-4479-b34f-e5492f1f0428"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.579068 4961 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b7e686d4-61f5-4479-b34f-e5492f1f0428-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.579114 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q2462\" (UniqueName: \"kubernetes.io/projected/b7e686d4-61f5-4479-b34f-e5492f1f0428-kube-api-access-q2462\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.579128 4961 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b7e686d4-61f5-4479-b34f-e5492f1f0428-logs\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.579144 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b7e686d4-61f5-4479-b34f-e5492f1f0428-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.579157 4961 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b7e686d4-61f5-4479-b34f-e5492f1f0428-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.579169 4961 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b7e686d4-61f5-4479-b34f-e5492f1f0428-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.579180 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b7e686d4-61f5-4479-b34f-e5492f1f0428-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.600641 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-76c589f96d-p4cfs"] Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.600974 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-76c589f96d-p4cfs" podUID="5804df6a-9783-4952-84e2-deb85ddc3133" containerName="neutron-api" containerID="cri-o://29879b7015dfef1936bb009a898915748ff1af72baa26f5e66466acda2dd8339" gracePeriod=30 Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.601038 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/neutron-76c589f96d-p4cfs" podUID="5804df6a-9783-4952-84e2-deb85ddc3133" containerName="neutron-httpd" containerID="cri-o://e1e14ad67340cee5387ebd515cf221545bcbd1b551f967e84f3df97acc62b0a6" gracePeriod=30 Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.610153 4961 generic.go:334] "Generic (PLEG): container finished" podID="b7e686d4-61f5-4479-b34f-e5492f1f0428" containerID="35195394438ce7fa327d1f89c9f34a88888cfa01c66fdada1401de265ab0ee95" exitCode=0 Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.610186 4961 generic.go:334] "Generic (PLEG): container finished" podID="b7e686d4-61f5-4479-b34f-e5492f1f0428" containerID="2f64762eb29b69cd4b99f79514f58d663a745891e19fbdb3d0079a94be70e856" exitCode=143 Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.610230 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" 
event={"ID":"b7e686d4-61f5-4479-b34f-e5492f1f0428","Type":"ContainerDied","Data":"35195394438ce7fa327d1f89c9f34a88888cfa01c66fdada1401de265ab0ee95"} Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.610256 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b7e686d4-61f5-4479-b34f-e5492f1f0428","Type":"ContainerDied","Data":"2f64762eb29b69cd4b99f79514f58d663a745891e19fbdb3d0079a94be70e856"} Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.610265 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"b7e686d4-61f5-4479-b34f-e5492f1f0428","Type":"ContainerDied","Data":"0e534a4a71ff88975c8d06d24a7b9f764584cbac9b0e04c8d5b9458591707213"} Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.610283 4961 scope.go:117] "RemoveContainer" containerID="35195394438ce7fa327d1f89c9f34a88888cfa01c66fdada1401de265ab0ee95" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.611050 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.622948 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"06e09cee-db01-4318-bc75-09ac7376772d","Type":"ContainerStarted","Data":"ec0f06db029efd69ffbad54cd8f317fac1677a3141f33bb09a5d97d37fdcba66"} Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.660864 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=4.670800615 podStartE2EDuration="5.660846684s" podCreationTimestamp="2025-12-05 17:51:55 +0000 UTC" firstStartedPulling="2025-12-05 17:51:56.887531418 +0000 UTC m=+1122.948681891" lastFinishedPulling="2025-12-05 17:51:57.877577487 +0000 UTC m=+1123.938727960" observedRunningTime="2025-12-05 17:52:00.643116488 +0000 UTC m=+1126.704266971" watchObservedRunningTime="2025-12-05 17:52:00.660846684 +0000 UTC m=+1126.721997157" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.663136 4961 scope.go:117] "RemoveContainer" containerID="2f64762eb29b69cd4b99f79514f58d663a745891e19fbdb3d0079a94be70e856" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.680821 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-api-0"] Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.692159 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-api-0"] Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.701315 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-api-0"] Dec 05 17:52:00 crc kubenswrapper[4961]: E1205 17:52:00.701717 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7e686d4-61f5-4479-b34f-e5492f1f0428" containerName="cinder-api-log" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.701732 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7e686d4-61f5-4479-b34f-e5492f1f0428" containerName="cinder-api-log" Dec 05 17:52:00 crc kubenswrapper[4961]: E1205 17:52:00.701755 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b7e686d4-61f5-4479-b34f-e5492f1f0428" containerName="cinder-api" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.701761 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="b7e686d4-61f5-4479-b34f-e5492f1f0428" containerName="cinder-api" Dec 05 17:52:00 crc kubenswrapper[4961]: E1205 17:52:00.701797 4961 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="4a4e68e8-4547-458b-85bf-c898c9abcf2c" containerName="init" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.701804 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a4e68e8-4547-458b-85bf-c898c9abcf2c" containerName="init" Dec 05 17:52:00 crc kubenswrapper[4961]: E1205 17:52:00.701817 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4a4e68e8-4547-458b-85bf-c898c9abcf2c" containerName="dnsmasq-dns" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.701823 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="4a4e68e8-4547-458b-85bf-c898c9abcf2c" containerName="dnsmasq-dns" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.702041 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="4a4e68e8-4547-458b-85bf-c898c9abcf2c" containerName="dnsmasq-dns" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.702396 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7e686d4-61f5-4479-b34f-e5492f1f0428" containerName="cinder-api-log" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.702425 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="b7e686d4-61f5-4479-b34f-e5492f1f0428" containerName="cinder-api" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.703371 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.706067 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-api-config-data" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.706257 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-public-svc" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.707438 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-cinder-internal-svc" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.711733 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.712060 4961 scope.go:117] "RemoveContainer" containerID="35195394438ce7fa327d1f89c9f34a88888cfa01c66fdada1401de265ab0ee95" Dec 05 17:52:00 crc kubenswrapper[4961]: E1205 17:52:00.712498 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"35195394438ce7fa327d1f89c9f34a88888cfa01c66fdada1401de265ab0ee95\": container with ID starting with 35195394438ce7fa327d1f89c9f34a88888cfa01c66fdada1401de265ab0ee95 not found: ID does not exist" containerID="35195394438ce7fa327d1f89c9f34a88888cfa01c66fdada1401de265ab0ee95" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.712542 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35195394438ce7fa327d1f89c9f34a88888cfa01c66fdada1401de265ab0ee95"} err="failed to get container status \"35195394438ce7fa327d1f89c9f34a88888cfa01c66fdada1401de265ab0ee95\": rpc error: code = NotFound desc = could not find container \"35195394438ce7fa327d1f89c9f34a88888cfa01c66fdada1401de265ab0ee95\": container with ID starting with 35195394438ce7fa327d1f89c9f34a88888cfa01c66fdada1401de265ab0ee95 not found: ID does not exist" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.712573 4961 scope.go:117] "RemoveContainer" containerID="2f64762eb29b69cd4b99f79514f58d663a745891e19fbdb3d0079a94be70e856" Dec 05 17:52:00 crc kubenswrapper[4961]: E1205 17:52:00.712863 4961 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f64762eb29b69cd4b99f79514f58d663a745891e19fbdb3d0079a94be70e856\": container with ID starting with 2f64762eb29b69cd4b99f79514f58d663a745891e19fbdb3d0079a94be70e856 not found: ID does not exist" containerID="2f64762eb29b69cd4b99f79514f58d663a745891e19fbdb3d0079a94be70e856" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.712897 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f64762eb29b69cd4b99f79514f58d663a745891e19fbdb3d0079a94be70e856"} err="failed to get container status \"2f64762eb29b69cd4b99f79514f58d663a745891e19fbdb3d0079a94be70e856\": rpc error: code = NotFound desc = could not find container \"2f64762eb29b69cd4b99f79514f58d663a745891e19fbdb3d0079a94be70e856\": container with ID starting with 2f64762eb29b69cd4b99f79514f58d663a745891e19fbdb3d0079a94be70e856 not found: ID does not exist" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.712914 4961 scope.go:117] "RemoveContainer" containerID="35195394438ce7fa327d1f89c9f34a88888cfa01c66fdada1401de265ab0ee95" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.713113 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"35195394438ce7fa327d1f89c9f34a88888cfa01c66fdada1401de265ab0ee95"} err="failed to get container status \"35195394438ce7fa327d1f89c9f34a88888cfa01c66fdada1401de265ab0ee95\": rpc error: code = NotFound desc = could not find container \"35195394438ce7fa327d1f89c9f34a88888cfa01c66fdada1401de265ab0ee95\": container with ID starting with 35195394438ce7fa327d1f89c9f34a88888cfa01c66fdada1401de265ab0ee95 not found: ID does not exist" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.713132 4961 scope.go:117] "RemoveContainer" containerID="2f64762eb29b69cd4b99f79514f58d663a745891e19fbdb3d0079a94be70e856" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.713315 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f64762eb29b69cd4b99f79514f58d663a745891e19fbdb3d0079a94be70e856"} err="failed to get container status \"2f64762eb29b69cd4b99f79514f58d663a745891e19fbdb3d0079a94be70e856\": rpc error: code = NotFound desc = could not find container \"2f64762eb29b69cd4b99f79514f58d663a745891e19fbdb3d0079a94be70e856\": container with ID starting with 2f64762eb29b69cd4b99f79514f58d663a745891e19fbdb3d0079a94be70e856 not found: ID does not exist" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.785211 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.785278 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-config-data\") pod \"cinder-api-0\" (UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.785391 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-scripts\") pod \"cinder-api-0\" 
(UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.785436 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-config-data-custom\") pod \"cinder-api-0\" (UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.785464 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.785512 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xx4rs\" (UniqueName: \"kubernetes.io/projected/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-kube-api-access-xx4rs\") pod \"cinder-api-0\" (UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.785606 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.785697 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-public-tls-certs\") pod \"cinder-api-0\" (UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.785887 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-logs\") pod \"cinder-api-0\" (UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.802804 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/horizon-78bb69647d-95ptt" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.863914 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-65bb59d746-cqlw9"] Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.864168 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-65bb59d746-cqlw9" podUID="01eef206-68f4-4923-8253-2a130ba0dca3" containerName="horizon-log" containerID="cri-o://b962d0a3c530d2aeff022acebc2fd4c86c94ea227e97d35c58d1ba851a3abe96" gracePeriod=30 Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.865681 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/horizon-65bb59d746-cqlw9" podUID="01eef206-68f4-4923-8253-2a130ba0dca3" containerName="horizon" containerID="cri-o://01aff3bd688c5b14a84d39236485cd65f7bf7cd0d578accde815230603894791" gracePeriod=30 Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.887708 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="4a4e68e8-4547-458b-85bf-c898c9abcf2c" path="/var/lib/kubelet/pods/4a4e68e8-4547-458b-85bf-c898c9abcf2c/volumes" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.887757 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-public-tls-certs\") pod \"cinder-api-0\" (UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.887835 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-logs\") pod \"cinder-api-0\" (UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.887894 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.887937 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-config-data\") pod \"cinder-api-0\" (UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.887966 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-scripts\") pod \"cinder-api-0\" (UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.887983 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-config-data-custom\") pod \"cinder-api-0\" (UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.887997 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.888019 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xx4rs\" (UniqueName: \"kubernetes.io/projected/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-kube-api-access-xx4rs\") pod \"cinder-api-0\" (UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.888073 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.889010 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b7e686d4-61f5-4479-b34f-e5492f1f0428" 
path="/var/lib/kubelet/pods/b7e686d4-61f5-4479-b34f-e5492f1f0428/volumes" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.890510 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-etc-machine-id\") pod \"cinder-api-0\" (UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.892085 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-logs\") pod \"cinder-api-0\" (UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.895334 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-config-data-custom\") pod \"cinder-api-0\" (UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.897270 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-scripts\") pod \"cinder-api-0\" (UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.897490 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-public-tls-certs\") pod \"cinder-api-0\" (UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.898637 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-internal-tls-certs\") pod \"cinder-api-0\" (UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.899285 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-combined-ca-bundle\") pod \"cinder-api-0\" (UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.903646 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-config-data\") pod \"cinder-api-0\" (UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:00 crc kubenswrapper[4961]: I1205 17:52:00.918257 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xx4rs\" (UniqueName: \"kubernetes.io/projected/7aa62ef2-e824-4f99-98bc-d4049b51ab7e-kube-api-access-xx4rs\") pod \"cinder-api-0\" (UID: \"7aa62ef2-e824-4f99-98bc-d4049b51ab7e\") " pod="openstack/cinder-api-0" Dec 05 17:52:01 crc kubenswrapper[4961]: I1205 17:52:01.032702 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-api-0" Dec 05 17:52:01 crc kubenswrapper[4961]: I1205 17:52:01.218150 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 05 17:52:01 crc kubenswrapper[4961]: I1205 17:52:01.508347 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-api-0"] Dec 05 17:52:01 crc kubenswrapper[4961]: I1205 17:52:01.641065 4961 generic.go:334] "Generic (PLEG): container finished" podID="0d65359f-708b-487d-8498-6f8537d5a443" containerID="5720bba3c727018f5b87f03f054c3c0ed9cdc617ffbd5dfcc1e37236f64be613" exitCode=0 Dec 05 17:52:01 crc kubenswrapper[4961]: I1205 17:52:01.641217 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-59df8b55c8-ff94g" event={"ID":"0d65359f-708b-487d-8498-6f8537d5a443","Type":"ContainerDied","Data":"5720bba3c727018f5b87f03f054c3c0ed9cdc617ffbd5dfcc1e37236f64be613"} Dec 05 17:52:01 crc kubenswrapper[4961]: I1205 17:52:01.646134 4961 generic.go:334] "Generic (PLEG): container finished" podID="5804df6a-9783-4952-84e2-deb85ddc3133" containerID="e1e14ad67340cee5387ebd515cf221545bcbd1b551f967e84f3df97acc62b0a6" exitCode=0 Dec 05 17:52:01 crc kubenswrapper[4961]: I1205 17:52:01.646233 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76c589f96d-p4cfs" event={"ID":"5804df6a-9783-4952-84e2-deb85ddc3133","Type":"ContainerDied","Data":"e1e14ad67340cee5387ebd515cf221545bcbd1b551f967e84f3df97acc62b0a6"} Dec 05 17:52:01 crc kubenswrapper[4961]: I1205 17:52:01.650972 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"7aa62ef2-e824-4f99-98bc-d4049b51ab7e","Type":"ContainerStarted","Data":"332fd02543437c89462b1eb4142c1dac7e0a5484a74b0ef08c6c39a0dcd77c1f"} Dec 05 17:52:01 crc kubenswrapper[4961]: I1205 17:52:01.801930 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-59df8b55c8-ff94g" Dec 05 17:52:01 crc kubenswrapper[4961]: I1205 17:52:01.910056 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7cr2z\" (UniqueName: \"kubernetes.io/projected/0d65359f-708b-487d-8498-6f8537d5a443-kube-api-access-7cr2z\") pod \"0d65359f-708b-487d-8498-6f8537d5a443\" (UID: \"0d65359f-708b-487d-8498-6f8537d5a443\") " Dec 05 17:52:01 crc kubenswrapper[4961]: I1205 17:52:01.910110 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d65359f-708b-487d-8498-6f8537d5a443-logs\") pod \"0d65359f-708b-487d-8498-6f8537d5a443\" (UID: \"0d65359f-708b-487d-8498-6f8537d5a443\") " Dec 05 17:52:01 crc kubenswrapper[4961]: I1205 17:52:01.910148 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0d65359f-708b-487d-8498-6f8537d5a443-config-data-custom\") pod \"0d65359f-708b-487d-8498-6f8537d5a443\" (UID: \"0d65359f-708b-487d-8498-6f8537d5a443\") " Dec 05 17:52:01 crc kubenswrapper[4961]: I1205 17:52:01.910236 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d65359f-708b-487d-8498-6f8537d5a443-config-data\") pod \"0d65359f-708b-487d-8498-6f8537d5a443\" (UID: \"0d65359f-708b-487d-8498-6f8537d5a443\") " Dec 05 17:52:01 crc kubenswrapper[4961]: I1205 17:52:01.910263 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d65359f-708b-487d-8498-6f8537d5a443-combined-ca-bundle\") pod \"0d65359f-708b-487d-8498-6f8537d5a443\" (UID: \"0d65359f-708b-487d-8498-6f8537d5a443\") " Dec 05 17:52:01 crc kubenswrapper[4961]: I1205 17:52:01.913262 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0d65359f-708b-487d-8498-6f8537d5a443-logs" (OuterVolumeSpecName: "logs") pod "0d65359f-708b-487d-8498-6f8537d5a443" (UID: "0d65359f-708b-487d-8498-6f8537d5a443"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:52:01 crc kubenswrapper[4961]: I1205 17:52:01.917271 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0d65359f-708b-487d-8498-6f8537d5a443-kube-api-access-7cr2z" (OuterVolumeSpecName: "kube-api-access-7cr2z") pod "0d65359f-708b-487d-8498-6f8537d5a443" (UID: "0d65359f-708b-487d-8498-6f8537d5a443"). InnerVolumeSpecName "kube-api-access-7cr2z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:52:01 crc kubenswrapper[4961]: I1205 17:52:01.921038 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d65359f-708b-487d-8498-6f8537d5a443-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "0d65359f-708b-487d-8498-6f8537d5a443" (UID: "0d65359f-708b-487d-8498-6f8537d5a443"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:01 crc kubenswrapper[4961]: I1205 17:52:01.953632 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d65359f-708b-487d-8498-6f8537d5a443-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0d65359f-708b-487d-8498-6f8537d5a443" (UID: "0d65359f-708b-487d-8498-6f8537d5a443"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:01 crc kubenswrapper[4961]: I1205 17:52:01.994457 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0d65359f-708b-487d-8498-6f8537d5a443-config-data" (OuterVolumeSpecName: "config-data") pod "0d65359f-708b-487d-8498-6f8537d5a443" (UID: "0d65359f-708b-487d-8498-6f8537d5a443"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.012829 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7cr2z\" (UniqueName: \"kubernetes.io/projected/0d65359f-708b-487d-8498-6f8537d5a443-kube-api-access-7cr2z\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.012859 4961 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/0d65359f-708b-487d-8498-6f8537d5a443-logs\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.012872 4961 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/0d65359f-708b-487d-8498-6f8537d5a443-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.012881 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0d65359f-708b-487d-8498-6f8537d5a443-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.012890 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0d65359f-708b-487d-8498-6f8537d5a443-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.027458 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.114123 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-sg-core-conf-yaml\") pod \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.114183 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-run-httpd\") pod \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.114340 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-combined-ca-bundle\") pod \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.114377 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-config-data\") pod \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.114552 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-log-httpd\") pod \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.114593 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ltz74\" (UniqueName: \"kubernetes.io/projected/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-kube-api-access-ltz74\") pod \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.114649 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-scripts\") pod \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\" (UID: \"c30d6edf-8519-48cf-bcb0-f35c08e19a8b\") " Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.115515 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "c30d6edf-8519-48cf-bcb0-f35c08e19a8b" (UID: "c30d6edf-8519-48cf-bcb0-f35c08e19a8b"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.115824 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "c30d6edf-8519-48cf-bcb0-f35c08e19a8b" (UID: "c30d6edf-8519-48cf-bcb0-f35c08e19a8b"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.119984 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-kube-api-access-ltz74" (OuterVolumeSpecName: "kube-api-access-ltz74") pod "c30d6edf-8519-48cf-bcb0-f35c08e19a8b" (UID: "c30d6edf-8519-48cf-bcb0-f35c08e19a8b"). InnerVolumeSpecName "kube-api-access-ltz74". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.120521 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ltz74\" (UniqueName: \"kubernetes.io/projected/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-kube-api-access-ltz74\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.120551 4961 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.120561 4961 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.127404 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-scripts" (OuterVolumeSpecName: "scripts") pod "c30d6edf-8519-48cf-bcb0-f35c08e19a8b" (UID: "c30d6edf-8519-48cf-bcb0-f35c08e19a8b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.167905 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "c30d6edf-8519-48cf-bcb0-f35c08e19a8b" (UID: "c30d6edf-8519-48cf-bcb0-f35c08e19a8b"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.200496 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c30d6edf-8519-48cf-bcb0-f35c08e19a8b" (UID: "c30d6edf-8519-48cf-bcb0-f35c08e19a8b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.221873 4961 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.221908 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.221923 4961 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.224920 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-config-data" (OuterVolumeSpecName: "config-data") pod "c30d6edf-8519-48cf-bcb0-f35c08e19a8b" (UID: "c30d6edf-8519-48cf-bcb0-f35c08e19a8b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.324101 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c30d6edf-8519-48cf-bcb0-f35c08e19a8b-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.663725 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"7aa62ef2-e824-4f99-98bc-d4049b51ab7e","Type":"ContainerStarted","Data":"745ec1e066a92bcb94273b1a10d4bcbfd0826af553f2dde4807c0a17bda5b415"} Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.667258 4961 generic.go:334] "Generic (PLEG): container finished" podID="c30d6edf-8519-48cf-bcb0-f35c08e19a8b" containerID="91ba97da2e86065ff4b790a83ee68ce363090d49ee7585ccbe7f1ba79126fdad" exitCode=0 Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.667316 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c30d6edf-8519-48cf-bcb0-f35c08e19a8b","Type":"ContainerDied","Data":"91ba97da2e86065ff4b790a83ee68ce363090d49ee7585ccbe7f1ba79126fdad"} Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.667345 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"c30d6edf-8519-48cf-bcb0-f35c08e19a8b","Type":"ContainerDied","Data":"6574561f9d3bc45fcf61fda6ff0b2ae8099cd95ed1d2e56e72f8ddf334484290"} Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.667363 4961 scope.go:117] "RemoveContainer" containerID="eebbe11a2a2fc05c066b28acb1f8dc3ba86786b8e23240ca6f5ab3ebbe99fbcb" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.667468 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.678560 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/barbican-api-59df8b55c8-ff94g" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.679378 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/barbican-api-59df8b55c8-ff94g" event={"ID":"0d65359f-708b-487d-8498-6f8537d5a443","Type":"ContainerDied","Data":"e2516ff3539ba7a3de869cac5975202340c24114aa01e121f842645c839e17ba"} Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.703038 4961 scope.go:117] "RemoveContainer" containerID="95a0a635006377cba44f567ed9b400f892dedd06650de3cd25250742f3ccc4dd" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.768836 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.774668 4961 scope.go:117] "RemoveContainer" containerID="91ba97da2e86065ff4b790a83ee68ce363090d49ee7585ccbe7f1ba79126fdad" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.777755 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.788942 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-api-59df8b55c8-ff94g"] Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.814784 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:52:02 crc kubenswrapper[4961]: E1205 17:52:02.815249 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c30d6edf-8519-48cf-bcb0-f35c08e19a8b" containerName="proxy-httpd" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.815266 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="c30d6edf-8519-48cf-bcb0-f35c08e19a8b" containerName="proxy-httpd" Dec 05 17:52:02 crc kubenswrapper[4961]: E1205 17:52:02.815282 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d65359f-708b-487d-8498-6f8537d5a443" containerName="barbican-api-log" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.815288 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d65359f-708b-487d-8498-6f8537d5a443" containerName="barbican-api-log" Dec 05 17:52:02 crc kubenswrapper[4961]: E1205 17:52:02.815306 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c30d6edf-8519-48cf-bcb0-f35c08e19a8b" containerName="ceilometer-notification-agent" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.815312 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="c30d6edf-8519-48cf-bcb0-f35c08e19a8b" containerName="ceilometer-notification-agent" Dec 05 17:52:02 crc kubenswrapper[4961]: E1205 17:52:02.815326 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0d65359f-708b-487d-8498-6f8537d5a443" containerName="barbican-api" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.815331 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="0d65359f-708b-487d-8498-6f8537d5a443" containerName="barbican-api" Dec 05 17:52:02 crc kubenswrapper[4961]: E1205 17:52:02.815339 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c30d6edf-8519-48cf-bcb0-f35c08e19a8b" containerName="sg-core" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.815346 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="c30d6edf-8519-48cf-bcb0-f35c08e19a8b" containerName="sg-core" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.818167 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d65359f-708b-487d-8498-6f8537d5a443" containerName="barbican-api" Dec 05 17:52:02 crc 
kubenswrapper[4961]: I1205 17:52:02.818207 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="c30d6edf-8519-48cf-bcb0-f35c08e19a8b" containerName="proxy-httpd" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.818226 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="0d65359f-708b-487d-8498-6f8537d5a443" containerName="barbican-api-log" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.818234 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="c30d6edf-8519-48cf-bcb0-f35c08e19a8b" containerName="sg-core" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.818248 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="c30d6edf-8519-48cf-bcb0-f35c08e19a8b" containerName="ceilometer-notification-agent" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.821261 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.822101 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-api-59df8b55c8-ff94g"] Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.824185 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.824390 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.833043 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.834479 4961 scope.go:117] "RemoveContainer" containerID="eebbe11a2a2fc05c066b28acb1f8dc3ba86786b8e23240ca6f5ab3ebbe99fbcb" Dec 05 17:52:02 crc kubenswrapper[4961]: E1205 17:52:02.835202 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eebbe11a2a2fc05c066b28acb1f8dc3ba86786b8e23240ca6f5ab3ebbe99fbcb\": container with ID starting with eebbe11a2a2fc05c066b28acb1f8dc3ba86786b8e23240ca6f5ab3ebbe99fbcb not found: ID does not exist" containerID="eebbe11a2a2fc05c066b28acb1f8dc3ba86786b8e23240ca6f5ab3ebbe99fbcb" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.835234 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eebbe11a2a2fc05c066b28acb1f8dc3ba86786b8e23240ca6f5ab3ebbe99fbcb"} err="failed to get container status \"eebbe11a2a2fc05c066b28acb1f8dc3ba86786b8e23240ca6f5ab3ebbe99fbcb\": rpc error: code = NotFound desc = could not find container \"eebbe11a2a2fc05c066b28acb1f8dc3ba86786b8e23240ca6f5ab3ebbe99fbcb\": container with ID starting with eebbe11a2a2fc05c066b28acb1f8dc3ba86786b8e23240ca6f5ab3ebbe99fbcb not found: ID does not exist" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.835256 4961 scope.go:117] "RemoveContainer" containerID="95a0a635006377cba44f567ed9b400f892dedd06650de3cd25250742f3ccc4dd" Dec 05 17:52:02 crc kubenswrapper[4961]: E1205 17:52:02.835503 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"95a0a635006377cba44f567ed9b400f892dedd06650de3cd25250742f3ccc4dd\": container with ID starting with 95a0a635006377cba44f567ed9b400f892dedd06650de3cd25250742f3ccc4dd not found: ID does not exist" containerID="95a0a635006377cba44f567ed9b400f892dedd06650de3cd25250742f3ccc4dd" Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.835527 4961 
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.835542 4961 scope.go:117] "RemoveContainer" containerID="91ba97da2e86065ff4b790a83ee68ce363090d49ee7585ccbe7f1ba79126fdad"
Dec 05 17:52:02 crc kubenswrapper[4961]: E1205 17:52:02.837513 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"91ba97da2e86065ff4b790a83ee68ce363090d49ee7585ccbe7f1ba79126fdad\": container with ID starting with 91ba97da2e86065ff4b790a83ee68ce363090d49ee7585ccbe7f1ba79126fdad not found: ID does not exist" containerID="91ba97da2e86065ff4b790a83ee68ce363090d49ee7585ccbe7f1ba79126fdad"
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.837555 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"91ba97da2e86065ff4b790a83ee68ce363090d49ee7585ccbe7f1ba79126fdad"} err="failed to get container status \"91ba97da2e86065ff4b790a83ee68ce363090d49ee7585ccbe7f1ba79126fdad\": rpc error: code = NotFound desc = could not find container \"91ba97da2e86065ff4b790a83ee68ce363090d49ee7585ccbe7f1ba79126fdad\": container with ID starting with 91ba97da2e86065ff4b790a83ee68ce363090d49ee7585ccbe7f1ba79126fdad not found: ID does not exist"
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.837601 4961 scope.go:117] "RemoveContainer" containerID="5720bba3c727018f5b87f03f054c3c0ed9cdc617ffbd5dfcc1e37236f64be613"
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.857271 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/083ba775-2cc0-4528-9712-27574bbf5df7-config-data\") pod \"ceilometer-0\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " pod="openstack/ceilometer-0"
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.857327 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/083ba775-2cc0-4528-9712-27574bbf5df7-run-httpd\") pod \"ceilometer-0\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " pod="openstack/ceilometer-0"
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.857376 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/083ba775-2cc0-4528-9712-27574bbf5df7-log-httpd\") pod \"ceilometer-0\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " pod="openstack/ceilometer-0"
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.857396 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/083ba775-2cc0-4528-9712-27574bbf5df7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " pod="openstack/ceilometer-0"
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.857446 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qdvz7\" (UniqueName: \"kubernetes.io/projected/083ba775-2cc0-4528-9712-27574bbf5df7-kube-api-access-qdvz7\") pod \"ceilometer-0\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " pod="openstack/ceilometer-0"
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.857467 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/083ba775-2cc0-4528-9712-27574bbf5df7-scripts\") pod \"ceilometer-0\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " pod="openstack/ceilometer-0"
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.857486 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/083ba775-2cc0-4528-9712-27574bbf5df7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " pod="openstack/ceilometer-0"
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.869560 4961 scope.go:117] "RemoveContainer" containerID="afe906a198e095507e3a0a32303131350a8bcb595fa6b349ef2155d6a93d6984"
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.878515 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0d65359f-708b-487d-8498-6f8537d5a443" path="/var/lib/kubelet/pods/0d65359f-708b-487d-8498-6f8537d5a443/volumes"
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.879759 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c30d6edf-8519-48cf-bcb0-f35c08e19a8b" path="/var/lib/kubelet/pods/c30d6edf-8519-48cf-bcb0-f35c08e19a8b/volumes"
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.958567 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qdvz7\" (UniqueName: \"kubernetes.io/projected/083ba775-2cc0-4528-9712-27574bbf5df7-kube-api-access-qdvz7\") pod \"ceilometer-0\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " pod="openstack/ceilometer-0"
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.958609 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/083ba775-2cc0-4528-9712-27574bbf5df7-scripts\") pod \"ceilometer-0\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " pod="openstack/ceilometer-0"
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.958631 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/083ba775-2cc0-4528-9712-27574bbf5df7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " pod="openstack/ceilometer-0"
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.958703 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/083ba775-2cc0-4528-9712-27574bbf5df7-config-data\") pod \"ceilometer-0\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " pod="openstack/ceilometer-0"
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.958729 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/083ba775-2cc0-4528-9712-27574bbf5df7-run-httpd\") pod \"ceilometer-0\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " pod="openstack/ceilometer-0"
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.958796 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/083ba775-2cc0-4528-9712-27574bbf5df7-log-httpd\") pod \"ceilometer-0\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " pod="openstack/ceilometer-0"
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.958817 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/083ba775-2cc0-4528-9712-27574bbf5df7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " pod="openstack/ceilometer-0"
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.962292 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/083ba775-2cc0-4528-9712-27574bbf5df7-log-httpd\") pod \"ceilometer-0\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " pod="openstack/ceilometer-0"
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.965065 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/083ba775-2cc0-4528-9712-27574bbf5df7-run-httpd\") pod \"ceilometer-0\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " pod="openstack/ceilometer-0"
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.966520 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/083ba775-2cc0-4528-9712-27574bbf5df7-scripts\") pod \"ceilometer-0\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " pod="openstack/ceilometer-0"
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.966861 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/083ba775-2cc0-4528-9712-27574bbf5df7-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " pod="openstack/ceilometer-0"
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.971753 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/083ba775-2cc0-4528-9712-27574bbf5df7-config-data\") pod \"ceilometer-0\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " pod="openstack/ceilometer-0"
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.978544 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qdvz7\" (UniqueName: \"kubernetes.io/projected/083ba775-2cc0-4528-9712-27574bbf5df7-kube-api-access-qdvz7\") pod \"ceilometer-0\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " pod="openstack/ceilometer-0"
Dec 05 17:52:02 crc kubenswrapper[4961]: I1205 17:52:02.995512 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/083ba775-2cc0-4528-9712-27574bbf5df7-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " pod="openstack/ceilometer-0"
Dec 05 17:52:03 crc kubenswrapper[4961]: I1205 17:52:03.054736 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-846ff7879b-wj44p"
Dec 05 17:52:03 crc kubenswrapper[4961]: I1205 17:52:03.057739 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/placement-846ff7879b-wj44p"
Dec 05 17:52:03 crc kubenswrapper[4961]: I1205 17:52:03.147869 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0"
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:52:03 crc kubenswrapper[4961]: I1205 17:52:03.688511 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-api-0" event={"ID":"7aa62ef2-e824-4f99-98bc-d4049b51ab7e","Type":"ContainerStarted","Data":"4ecc61ef07d96499b28184d8eab0febc076f6d8188ba84c7d668c4a91789c861"} Dec 05 17:52:03 crc kubenswrapper[4961]: I1205 17:52:03.688855 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/cinder-api-0" Dec 05 17:52:03 crc kubenswrapper[4961]: I1205 17:52:03.717485 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:52:03 crc kubenswrapper[4961]: W1205 17:52:03.719659 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod083ba775_2cc0_4528_9712_27574bbf5df7.slice/crio-838b5bc176ccb3ffbd0846ee4a5847ea01b289b44eb1475a202173ddb76d7329 WatchSource:0}: Error finding container 838b5bc176ccb3ffbd0846ee4a5847ea01b289b44eb1475a202173ddb76d7329: Status 404 returned error can't find the container with id 838b5bc176ccb3ffbd0846ee4a5847ea01b289b44eb1475a202173ddb76d7329 Dec 05 17:52:03 crc kubenswrapper[4961]: I1205 17:52:03.740263 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-api-0" podStartSLOduration=3.740237574 podStartE2EDuration="3.740237574s" podCreationTimestamp="2025-12-05 17:52:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:52:03.727192454 +0000 UTC m=+1129.788342937" watchObservedRunningTime="2025-12-05 17:52:03.740237574 +0000 UTC m=+1129.801388047" Dec 05 17:52:04 crc kubenswrapper[4961]: I1205 17:52:04.703930 4961 generic.go:334] "Generic (PLEG): container finished" podID="01eef206-68f4-4923-8253-2a130ba0dca3" containerID="01aff3bd688c5b14a84d39236485cd65f7bf7cd0d578accde815230603894791" exitCode=0 Dec 05 17:52:04 crc kubenswrapper[4961]: I1205 17:52:04.704016 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-65bb59d746-cqlw9" event={"ID":"01eef206-68f4-4923-8253-2a130ba0dca3","Type":"ContainerDied","Data":"01aff3bd688c5b14a84d39236485cd65f7bf7cd0d578accde815230603894791"} Dec 05 17:52:04 crc kubenswrapper[4961]: I1205 17:52:04.707047 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"083ba775-2cc0-4528-9712-27574bbf5df7","Type":"ContainerStarted","Data":"7d2e1d7a1ef48f7abf6b14ad0ca0b52ad4571421a23555acfd6586457b70174e"} Dec 05 17:52:04 crc kubenswrapper[4961]: I1205 17:52:04.707080 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"083ba775-2cc0-4528-9712-27574bbf5df7","Type":"ContainerStarted","Data":"838b5bc176ccb3ffbd0846ee4a5847ea01b289b44eb1475a202173ddb76d7329"} Dec 05 17:52:05 crc kubenswrapper[4961]: I1205 17:52:05.117297 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-65bb59d746-cqlw9" podUID="01eef206-68f4-4923-8253-2a130ba0dca3" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Dec 05 17:52:05 crc kubenswrapper[4961]: I1205 17:52:05.719397 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"083ba775-2cc0-4528-9712-27574bbf5df7","Type":"ContainerStarted","Data":"72f0c5cc6585e3ff2f634480cd51db4ec1a91ccc8e98fc69da63cbb0a669335f"} Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.296985 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-76c589f96d-p4cfs" Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.406954 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl" Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.426895 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5804df6a-9783-4952-84e2-deb85ddc3133-combined-ca-bundle\") pod \"5804df6a-9783-4952-84e2-deb85ddc3133\" (UID: \"5804df6a-9783-4952-84e2-deb85ddc3133\") " Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.427048 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5804df6a-9783-4952-84e2-deb85ddc3133-ovndb-tls-certs\") pod \"5804df6a-9783-4952-84e2-deb85ddc3133\" (UID: \"5804df6a-9783-4952-84e2-deb85ddc3133\") " Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.427096 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5804df6a-9783-4952-84e2-deb85ddc3133-httpd-config\") pod \"5804df6a-9783-4952-84e2-deb85ddc3133\" (UID: \"5804df6a-9783-4952-84e2-deb85ddc3133\") " Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.427140 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/secret/5804df6a-9783-4952-84e2-deb85ddc3133-config\") pod \"5804df6a-9783-4952-84e2-deb85ddc3133\" (UID: \"5804df6a-9783-4952-84e2-deb85ddc3133\") " Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.427185 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6s7c\" (UniqueName: \"kubernetes.io/projected/5804df6a-9783-4952-84e2-deb85ddc3133-kube-api-access-d6s7c\") pod \"5804df6a-9783-4952-84e2-deb85ddc3133\" (UID: \"5804df6a-9783-4952-84e2-deb85ddc3133\") " Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.436451 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.440932 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5804df6a-9783-4952-84e2-deb85ddc3133-kube-api-access-d6s7c" (OuterVolumeSpecName: "kube-api-access-d6s7c") pod "5804df6a-9783-4952-84e2-deb85ddc3133" (UID: "5804df6a-9783-4952-84e2-deb85ddc3133"). InnerVolumeSpecName "kube-api-access-d6s7c". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.446277 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5804df6a-9783-4952-84e2-deb85ddc3133-httpd-config" (OuterVolumeSpecName: "httpd-config") pod "5804df6a-9783-4952-84e2-deb85ddc3133" (UID: "5804df6a-9783-4952-84e2-deb85ddc3133"). InnerVolumeSpecName "httpd-config". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.490376 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b6c948c7-stf4x"] Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.490907 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-b6c948c7-stf4x" podUID="779e9f2f-2533-432a-9ee4-0adb27af5405" containerName="dnsmasq-dns" containerID="cri-o://0a71f92debbc45d49290618060f351b47f1d47c8a587ae8116e9bfc2533e201b" gracePeriod=10 Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.543793 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.573091 4961 reconciler_common.go:293] "Volume detached for volume \"httpd-config\" (UniqueName: \"kubernetes.io/secret/5804df6a-9783-4952-84e2-deb85ddc3133-httpd-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.585611 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6s7c\" (UniqueName: \"kubernetes.io/projected/5804df6a-9783-4952-84e2-deb85ddc3133-kube-api-access-d6s7c\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.610323 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5804df6a-9783-4952-84e2-deb85ddc3133-config" (OuterVolumeSpecName: "config") pod "5804df6a-9783-4952-84e2-deb85ddc3133" (UID: "5804df6a-9783-4952-84e2-deb85ddc3133"). InnerVolumeSpecName "config". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.618236 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5804df6a-9783-4952-84e2-deb85ddc3133-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5804df6a-9783-4952-84e2-deb85ddc3133" (UID: "5804df6a-9783-4952-84e2-deb85ddc3133"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.627926 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5804df6a-9783-4952-84e2-deb85ddc3133-ovndb-tls-certs" (OuterVolumeSpecName: "ovndb-tls-certs") pod "5804df6a-9783-4952-84e2-deb85ddc3133" (UID: "5804df6a-9783-4952-84e2-deb85ddc3133"). InnerVolumeSpecName "ovndb-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.687495 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5804df6a-9783-4952-84e2-deb85ddc3133-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.687530 4961 reconciler_common.go:293] "Volume detached for volume \"ovndb-tls-certs\" (UniqueName: \"kubernetes.io/secret/5804df6a-9783-4952-84e2-deb85ddc3133-ovndb-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.687540 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/secret/5804df6a-9783-4952-84e2-deb85ddc3133-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.745394 4961 generic.go:334] "Generic (PLEG): container finished" podID="5804df6a-9783-4952-84e2-deb85ddc3133" containerID="29879b7015dfef1936bb009a898915748ff1af72baa26f5e66466acda2dd8339" exitCode=0 Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.745528 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76c589f96d-p4cfs" event={"ID":"5804df6a-9783-4952-84e2-deb85ddc3133","Type":"ContainerDied","Data":"29879b7015dfef1936bb009a898915748ff1af72baa26f5e66466acda2dd8339"} Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.745739 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-76c589f96d-p4cfs" event={"ID":"5804df6a-9783-4952-84e2-deb85ddc3133","Type":"ContainerDied","Data":"cec8eb27733a95f61ef452cb9c733dab2428e77501b98340feba1e197ddf4f82"} Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.745762 4961 scope.go:117] "RemoveContainer" containerID="e1e14ad67340cee5387ebd515cf221545bcbd1b551f967e84f3df97acc62b0a6" Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.745911 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/neutron-76c589f96d-p4cfs" Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.765191 4961 generic.go:334] "Generic (PLEG): container finished" podID="779e9f2f-2533-432a-9ee4-0adb27af5405" containerID="0a71f92debbc45d49290618060f351b47f1d47c8a587ae8116e9bfc2533e201b" exitCode=0 Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.765271 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b6c948c7-stf4x" event={"ID":"779e9f2f-2533-432a-9ee4-0adb27af5405","Type":"ContainerDied","Data":"0a71f92debbc45d49290618060f351b47f1d47c8a587ae8116e9bfc2533e201b"} Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.772228 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="06e09cee-db01-4318-bc75-09ac7376772d" containerName="cinder-scheduler" containerID="cri-o://1b0539d4e5dc5e9b35611fb8dc89f2f878a67bdf7463a887cc0f83b975b0e2c1" gracePeriod=30 Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.774192 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"083ba775-2cc0-4528-9712-27574bbf5df7","Type":"ContainerStarted","Data":"ffd086d664dc56d275e927d1d95a64e5f2c85e9ec384e7902e8b15d3a9fec9a2"} Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.774271 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/cinder-scheduler-0" podUID="06e09cee-db01-4318-bc75-09ac7376772d" containerName="probe" containerID="cri-o://ec0f06db029efd69ffbad54cd8f317fac1677a3141f33bb09a5d97d37fdcba66" gracePeriod=30 Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.780070 4961 scope.go:117] "RemoveContainer" containerID="29879b7015dfef1936bb009a898915748ff1af72baa26f5e66466acda2dd8339" Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.793820 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-76c589f96d-p4cfs"] Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.803641 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-76c589f96d-p4cfs"] Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.823027 4961 scope.go:117] "RemoveContainer" containerID="e1e14ad67340cee5387ebd515cf221545bcbd1b551f967e84f3df97acc62b0a6" Dec 05 17:52:06 crc kubenswrapper[4961]: E1205 17:52:06.826288 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1e14ad67340cee5387ebd515cf221545bcbd1b551f967e84f3df97acc62b0a6\": container with ID starting with e1e14ad67340cee5387ebd515cf221545bcbd1b551f967e84f3df97acc62b0a6 not found: ID does not exist" containerID="e1e14ad67340cee5387ebd515cf221545bcbd1b551f967e84f3df97acc62b0a6" Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.826342 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1e14ad67340cee5387ebd515cf221545bcbd1b551f967e84f3df97acc62b0a6"} err="failed to get container status \"e1e14ad67340cee5387ebd515cf221545bcbd1b551f967e84f3df97acc62b0a6\": rpc error: code = NotFound desc = could not find container \"e1e14ad67340cee5387ebd515cf221545bcbd1b551f967e84f3df97acc62b0a6\": container with ID starting with e1e14ad67340cee5387ebd515cf221545bcbd1b551f967e84f3df97acc62b0a6 not found: ID does not exist" Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.826376 4961 scope.go:117] "RemoveContainer" containerID="29879b7015dfef1936bb009a898915748ff1af72baa26f5e66466acda2dd8339" Dec 05 
17:52:06 crc kubenswrapper[4961]: E1205 17:52:06.827275 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"29879b7015dfef1936bb009a898915748ff1af72baa26f5e66466acda2dd8339\": container with ID starting with 29879b7015dfef1936bb009a898915748ff1af72baa26f5e66466acda2dd8339 not found: ID does not exist" containerID="29879b7015dfef1936bb009a898915748ff1af72baa26f5e66466acda2dd8339" Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.827318 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"29879b7015dfef1936bb009a898915748ff1af72baa26f5e66466acda2dd8339"} err="failed to get container status \"29879b7015dfef1936bb009a898915748ff1af72baa26f5e66466acda2dd8339\": rpc error: code = NotFound desc = could not find container \"29879b7015dfef1936bb009a898915748ff1af72baa26f5e66466acda2dd8339\": container with ID starting with 29879b7015dfef1936bb009a898915748ff1af72baa26f5e66466acda2dd8339 not found: ID does not exist" Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.879634 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5804df6a-9783-4952-84e2-deb85ddc3133" path="/var/lib/kubelet/pods/5804df6a-9783-4952-84e2-deb85ddc3133/volumes" Dec 05 17:52:06 crc kubenswrapper[4961]: I1205 17:52:06.961587 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-b6c948c7-stf4x" Dec 05 17:52:07 crc kubenswrapper[4961]: I1205 17:52:07.099696 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/779e9f2f-2533-432a-9ee4-0adb27af5405-ovsdbserver-nb\") pod \"779e9f2f-2533-432a-9ee4-0adb27af5405\" (UID: \"779e9f2f-2533-432a-9ee4-0adb27af5405\") " Dec 05 17:52:07 crc kubenswrapper[4961]: I1205 17:52:07.099832 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/779e9f2f-2533-432a-9ee4-0adb27af5405-ovsdbserver-sb\") pod \"779e9f2f-2533-432a-9ee4-0adb27af5405\" (UID: \"779e9f2f-2533-432a-9ee4-0adb27af5405\") " Dec 05 17:52:07 crc kubenswrapper[4961]: I1205 17:52:07.099888 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kgjsc\" (UniqueName: \"kubernetes.io/projected/779e9f2f-2533-432a-9ee4-0adb27af5405-kube-api-access-kgjsc\") pod \"779e9f2f-2533-432a-9ee4-0adb27af5405\" (UID: \"779e9f2f-2533-432a-9ee4-0adb27af5405\") " Dec 05 17:52:07 crc kubenswrapper[4961]: I1205 17:52:07.099951 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/779e9f2f-2533-432a-9ee4-0adb27af5405-config\") pod \"779e9f2f-2533-432a-9ee4-0adb27af5405\" (UID: \"779e9f2f-2533-432a-9ee4-0adb27af5405\") " Dec 05 17:52:07 crc kubenswrapper[4961]: I1205 17:52:07.100004 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/779e9f2f-2533-432a-9ee4-0adb27af5405-dns-svc\") pod \"779e9f2f-2533-432a-9ee4-0adb27af5405\" (UID: \"779e9f2f-2533-432a-9ee4-0adb27af5405\") " Dec 05 17:52:07 crc kubenswrapper[4961]: I1205 17:52:07.111847 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/779e9f2f-2533-432a-9ee4-0adb27af5405-kube-api-access-kgjsc" (OuterVolumeSpecName: "kube-api-access-kgjsc") pod 
"779e9f2f-2533-432a-9ee4-0adb27af5405" (UID: "779e9f2f-2533-432a-9ee4-0adb27af5405"). InnerVolumeSpecName "kube-api-access-kgjsc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:52:07 crc kubenswrapper[4961]: I1205 17:52:07.147645 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/779e9f2f-2533-432a-9ee4-0adb27af5405-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "779e9f2f-2533-432a-9ee4-0adb27af5405" (UID: "779e9f2f-2533-432a-9ee4-0adb27af5405"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:52:07 crc kubenswrapper[4961]: I1205 17:52:07.160341 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/779e9f2f-2533-432a-9ee4-0adb27af5405-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "779e9f2f-2533-432a-9ee4-0adb27af5405" (UID: "779e9f2f-2533-432a-9ee4-0adb27af5405"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:52:07 crc kubenswrapper[4961]: I1205 17:52:07.174685 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/779e9f2f-2533-432a-9ee4-0adb27af5405-config" (OuterVolumeSpecName: "config") pod "779e9f2f-2533-432a-9ee4-0adb27af5405" (UID: "779e9f2f-2533-432a-9ee4-0adb27af5405"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:52:07 crc kubenswrapper[4961]: I1205 17:52:07.183523 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/779e9f2f-2533-432a-9ee4-0adb27af5405-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "779e9f2f-2533-432a-9ee4-0adb27af5405" (UID: "779e9f2f-2533-432a-9ee4-0adb27af5405"). InnerVolumeSpecName "ovsdbserver-nb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:52:07 crc kubenswrapper[4961]: I1205 17:52:07.201746 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/779e9f2f-2533-432a-9ee4-0adb27af5405-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:07 crc kubenswrapper[4961]: I1205 17:52:07.201791 4961 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/779e9f2f-2533-432a-9ee4-0adb27af5405-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:07 crc kubenswrapper[4961]: I1205 17:52:07.201802 4961 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/779e9f2f-2533-432a-9ee4-0adb27af5405-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:07 crc kubenswrapper[4961]: I1205 17:52:07.201814 4961 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/779e9f2f-2533-432a-9ee4-0adb27af5405-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:07 crc kubenswrapper[4961]: I1205 17:52:07.201823 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kgjsc\" (UniqueName: \"kubernetes.io/projected/779e9f2f-2533-432a-9ee4-0adb27af5405-kube-api-access-kgjsc\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:07 crc kubenswrapper[4961]: I1205 17:52:07.788364 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"083ba775-2cc0-4528-9712-27574bbf5df7","Type":"ContainerStarted","Data":"3b20f58771e336b947ebe3957ceaae39aa08b59631e2240f21effe3cb0d2b50c"} Dec 05 17:52:07 crc kubenswrapper[4961]: I1205 17:52:07.788612 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 17:52:07 crc kubenswrapper[4961]: I1205 17:52:07.790734 4961 generic.go:334] "Generic (PLEG): container finished" podID="06e09cee-db01-4318-bc75-09ac7376772d" containerID="ec0f06db029efd69ffbad54cd8f317fac1677a3141f33bb09a5d97d37fdcba66" exitCode=0 Dec 05 17:52:07 crc kubenswrapper[4961]: I1205 17:52:07.790812 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"06e09cee-db01-4318-bc75-09ac7376772d","Type":"ContainerDied","Data":"ec0f06db029efd69ffbad54cd8f317fac1677a3141f33bb09a5d97d37fdcba66"} Dec 05 17:52:07 crc kubenswrapper[4961]: I1205 17:52:07.798163 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-b6c948c7-stf4x" event={"ID":"779e9f2f-2533-432a-9ee4-0adb27af5405","Type":"ContainerDied","Data":"553a79ab2e6b26e0df55d2a6b75f9459498a8f4c14f0f70236147e77d1b74b2b"} Dec 05 17:52:07 crc kubenswrapper[4961]: I1205 17:52:07.798363 4961 scope.go:117] "RemoveContainer" containerID="0a71f92debbc45d49290618060f351b47f1d47c8a587ae8116e9bfc2533e201b" Dec 05 17:52:07 crc kubenswrapper[4961]: I1205 17:52:07.798249 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-b6c948c7-stf4x" Dec 05 17:52:07 crc kubenswrapper[4961]: I1205 17:52:07.812682 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.189160661 podStartE2EDuration="5.812660597s" podCreationTimestamp="2025-12-05 17:52:02 +0000 UTC" firstStartedPulling="2025-12-05 17:52:03.722224631 +0000 UTC m=+1129.783375104" lastFinishedPulling="2025-12-05 17:52:07.345724567 +0000 UTC m=+1133.406875040" observedRunningTime="2025-12-05 17:52:07.804751483 +0000 UTC m=+1133.865901946" watchObservedRunningTime="2025-12-05 17:52:07.812660597 +0000 UTC m=+1133.873811070" Dec 05 17:52:07 crc kubenswrapper[4961]: I1205 17:52:07.835570 4961 scope.go:117] "RemoveContainer" containerID="1a7dce6d8d15e10b4a6cec51ec6150ced46cc2fcbe74962819ddd51f5e03f164" Dec 05 17:52:07 crc kubenswrapper[4961]: I1205 17:52:07.841341 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-b6c948c7-stf4x"] Dec 05 17:52:07 crc kubenswrapper[4961]: I1205 17:52:07.849565 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-b6c948c7-stf4x"] Dec 05 17:52:08 crc kubenswrapper[4961]: I1205 17:52:08.883366 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="779e9f2f-2533-432a-9ee4-0adb27af5405" path="/var/lib/kubelet/pods/779e9f2f-2533-432a-9ee4-0adb27af5405/volumes" Dec 05 17:52:10 crc kubenswrapper[4961]: I1205 17:52:10.832335 4961 generic.go:334] "Generic (PLEG): container finished" podID="06e09cee-db01-4318-bc75-09ac7376772d" containerID="1b0539d4e5dc5e9b35611fb8dc89f2f878a67bdf7463a887cc0f83b975b0e2c1" exitCode=0 Dec 05 17:52:10 crc kubenswrapper[4961]: I1205 17:52:10.832404 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"06e09cee-db01-4318-bc75-09ac7376772d","Type":"ContainerDied","Data":"1b0539d4e5dc5e9b35611fb8dc89f2f878a67bdf7463a887cc0f83b975b0e2c1"} Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.031034 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.182698 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06e09cee-db01-4318-bc75-09ac7376772d-combined-ca-bundle\") pod \"06e09cee-db01-4318-bc75-09ac7376772d\" (UID: \"06e09cee-db01-4318-bc75-09ac7376772d\") " Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.182743 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/06e09cee-db01-4318-bc75-09ac7376772d-etc-machine-id\") pod \"06e09cee-db01-4318-bc75-09ac7376772d\" (UID: \"06e09cee-db01-4318-bc75-09ac7376772d\") " Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.182870 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/06e09cee-db01-4318-bc75-09ac7376772d-config-data-custom\") pod \"06e09cee-db01-4318-bc75-09ac7376772d\" (UID: \"06e09cee-db01-4318-bc75-09ac7376772d\") " Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.182966 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/06e09cee-db01-4318-bc75-09ac7376772d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "06e09cee-db01-4318-bc75-09ac7376772d" (UID: "06e09cee-db01-4318-bc75-09ac7376772d"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.183009 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06e09cee-db01-4318-bc75-09ac7376772d-scripts\") pod \"06e09cee-db01-4318-bc75-09ac7376772d\" (UID: \"06e09cee-db01-4318-bc75-09ac7376772d\") " Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.183102 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-skl4x\" (UniqueName: \"kubernetes.io/projected/06e09cee-db01-4318-bc75-09ac7376772d-kube-api-access-skl4x\") pod \"06e09cee-db01-4318-bc75-09ac7376772d\" (UID: \"06e09cee-db01-4318-bc75-09ac7376772d\") " Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.183148 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06e09cee-db01-4318-bc75-09ac7376772d-config-data\") pod \"06e09cee-db01-4318-bc75-09ac7376772d\" (UID: \"06e09cee-db01-4318-bc75-09ac7376772d\") " Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.183557 4961 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/06e09cee-db01-4318-bc75-09ac7376772d-etc-machine-id\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.189582 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06e09cee-db01-4318-bc75-09ac7376772d-scripts" (OuterVolumeSpecName: "scripts") pod "06e09cee-db01-4318-bc75-09ac7376772d" (UID: "06e09cee-db01-4318-bc75-09ac7376772d"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.190432 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06e09cee-db01-4318-bc75-09ac7376772d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "06e09cee-db01-4318-bc75-09ac7376772d" (UID: "06e09cee-db01-4318-bc75-09ac7376772d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.196956 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/06e09cee-db01-4318-bc75-09ac7376772d-kube-api-access-skl4x" (OuterVolumeSpecName: "kube-api-access-skl4x") pod "06e09cee-db01-4318-bc75-09ac7376772d" (UID: "06e09cee-db01-4318-bc75-09ac7376772d"). InnerVolumeSpecName "kube-api-access-skl4x". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.243423 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06e09cee-db01-4318-bc75-09ac7376772d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "06e09cee-db01-4318-bc75-09ac7376772d" (UID: "06e09cee-db01-4318-bc75-09ac7376772d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.291034 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/06e09cee-db01-4318-bc75-09ac7376772d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.291103 4961 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/06e09cee-db01-4318-bc75-09ac7376772d-config-data-custom\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.291120 4961 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/06e09cee-db01-4318-bc75-09ac7376772d-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.291130 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-skl4x\" (UniqueName: \"kubernetes.io/projected/06e09cee-db01-4318-bc75-09ac7376772d-kube-api-access-skl4x\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.304678 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/06e09cee-db01-4318-bc75-09ac7376772d-config-data" (OuterVolumeSpecName: "config-data") pod "06e09cee-db01-4318-bc75-09ac7376772d" (UID: "06e09cee-db01-4318-bc75-09ac7376772d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.393806 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/06e09cee-db01-4318-bc75-09ac7376772d-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.847247 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"06e09cee-db01-4318-bc75-09ac7376772d","Type":"ContainerDied","Data":"ad8dda4ebcaac9a62bdac4d4440f778eb0718b26d38b074f40e9834e1989c625"} Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.847301 4961 scope.go:117] "RemoveContainer" containerID="ec0f06db029efd69ffbad54cd8f317fac1677a3141f33bb09a5d97d37fdcba66" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.847333 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.878990 4961 scope.go:117] "RemoveContainer" containerID="1b0539d4e5dc5e9b35611fb8dc89f2f878a67bdf7463a887cc0f83b975b0e2c1" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.885748 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.918179 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.940809 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 17:52:11 crc kubenswrapper[4961]: E1205 17:52:11.941474 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06e09cee-db01-4318-bc75-09ac7376772d" containerName="cinder-scheduler" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.941500 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="06e09cee-db01-4318-bc75-09ac7376772d" containerName="cinder-scheduler" Dec 05 17:52:11 crc kubenswrapper[4961]: E1205 17:52:11.941524 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5804df6a-9783-4952-84e2-deb85ddc3133" containerName="neutron-api" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.941536 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="5804df6a-9783-4952-84e2-deb85ddc3133" containerName="neutron-api" Dec 05 17:52:11 crc kubenswrapper[4961]: E1205 17:52:11.941559 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="06e09cee-db01-4318-bc75-09ac7376772d" containerName="probe" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.941567 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="06e09cee-db01-4318-bc75-09ac7376772d" containerName="probe" Dec 05 17:52:11 crc kubenswrapper[4961]: E1205 17:52:11.941583 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="779e9f2f-2533-432a-9ee4-0adb27af5405" containerName="init" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.941590 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="779e9f2f-2533-432a-9ee4-0adb27af5405" containerName="init" Dec 05 17:52:11 crc kubenswrapper[4961]: E1205 17:52:11.941599 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5804df6a-9783-4952-84e2-deb85ddc3133" containerName="neutron-httpd" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.941607 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="5804df6a-9783-4952-84e2-deb85ddc3133" containerName="neutron-httpd" Dec 05 
17:52:11 crc kubenswrapper[4961]: E1205 17:52:11.941621 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="779e9f2f-2533-432a-9ee4-0adb27af5405" containerName="dnsmasq-dns" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.941630 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="779e9f2f-2533-432a-9ee4-0adb27af5405" containerName="dnsmasq-dns" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.942560 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="5804df6a-9783-4952-84e2-deb85ddc3133" containerName="neutron-api" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.942592 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="06e09cee-db01-4318-bc75-09ac7376772d" containerName="cinder-scheduler" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.942617 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="06e09cee-db01-4318-bc75-09ac7376772d" containerName="probe" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.942633 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="5804df6a-9783-4952-84e2-deb85ddc3133" containerName="neutron-httpd" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.942656 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="779e9f2f-2533-432a-9ee4-0adb27af5405" containerName="dnsmasq-dns" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.944528 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.950547 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cinder-scheduler-config-data" Dec 05 17:52:11 crc kubenswrapper[4961]: I1205 17:52:11.959942 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 17:52:12 crc kubenswrapper[4961]: I1205 17:52:12.110292 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hczrp\" (UniqueName: \"kubernetes.io/projected/9188090c-6109-45b2-b63c-1656ebb2ad0e-kube-api-access-hczrp\") pod \"cinder-scheduler-0\" (UID: \"9188090c-6109-45b2-b63c-1656ebb2ad0e\") " pod="openstack/cinder-scheduler-0" Dec 05 17:52:12 crc kubenswrapper[4961]: I1205 17:52:12.110429 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9188090c-6109-45b2-b63c-1656ebb2ad0e-scripts\") pod \"cinder-scheduler-0\" (UID: \"9188090c-6109-45b2-b63c-1656ebb2ad0e\") " pod="openstack/cinder-scheduler-0" Dec 05 17:52:12 crc kubenswrapper[4961]: I1205 17:52:12.110478 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9188090c-6109-45b2-b63c-1656ebb2ad0e-config-data\") pod \"cinder-scheduler-0\" (UID: \"9188090c-6109-45b2-b63c-1656ebb2ad0e\") " pod="openstack/cinder-scheduler-0" Dec 05 17:52:12 crc kubenswrapper[4961]: I1205 17:52:12.110514 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9188090c-6109-45b2-b63c-1656ebb2ad0e-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9188090c-6109-45b2-b63c-1656ebb2ad0e\") " pod="openstack/cinder-scheduler-0" Dec 05 17:52:12 crc kubenswrapper[4961]: I1205 17:52:12.110550 4961 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9188090c-6109-45b2-b63c-1656ebb2ad0e-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9188090c-6109-45b2-b63c-1656ebb2ad0e\") " pod="openstack/cinder-scheduler-0" Dec 05 17:52:12 crc kubenswrapper[4961]: I1205 17:52:12.110625 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9188090c-6109-45b2-b63c-1656ebb2ad0e-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9188090c-6109-45b2-b63c-1656ebb2ad0e\") " pod="openstack/cinder-scheduler-0" Dec 05 17:52:12 crc kubenswrapper[4961]: I1205 17:52:12.212193 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9188090c-6109-45b2-b63c-1656ebb2ad0e-scripts\") pod \"cinder-scheduler-0\" (UID: \"9188090c-6109-45b2-b63c-1656ebb2ad0e\") " pod="openstack/cinder-scheduler-0" Dec 05 17:52:12 crc kubenswrapper[4961]: I1205 17:52:12.212254 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9188090c-6109-45b2-b63c-1656ebb2ad0e-config-data\") pod \"cinder-scheduler-0\" (UID: \"9188090c-6109-45b2-b63c-1656ebb2ad0e\") " pod="openstack/cinder-scheduler-0" Dec 05 17:52:12 crc kubenswrapper[4961]: I1205 17:52:12.212300 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9188090c-6109-45b2-b63c-1656ebb2ad0e-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9188090c-6109-45b2-b63c-1656ebb2ad0e\") " pod="openstack/cinder-scheduler-0" Dec 05 17:52:12 crc kubenswrapper[4961]: I1205 17:52:12.212336 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9188090c-6109-45b2-b63c-1656ebb2ad0e-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9188090c-6109-45b2-b63c-1656ebb2ad0e\") " pod="openstack/cinder-scheduler-0" Dec 05 17:52:12 crc kubenswrapper[4961]: I1205 17:52:12.212385 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9188090c-6109-45b2-b63c-1656ebb2ad0e-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9188090c-6109-45b2-b63c-1656ebb2ad0e\") " pod="openstack/cinder-scheduler-0" Dec 05 17:52:12 crc kubenswrapper[4961]: I1205 17:52:12.212444 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hczrp\" (UniqueName: \"kubernetes.io/projected/9188090c-6109-45b2-b63c-1656ebb2ad0e-kube-api-access-hczrp\") pod \"cinder-scheduler-0\" (UID: \"9188090c-6109-45b2-b63c-1656ebb2ad0e\") " pod="openstack/cinder-scheduler-0" Dec 05 17:52:12 crc kubenswrapper[4961]: I1205 17:52:12.212520 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/9188090c-6109-45b2-b63c-1656ebb2ad0e-etc-machine-id\") pod \"cinder-scheduler-0\" (UID: \"9188090c-6109-45b2-b63c-1656ebb2ad0e\") " pod="openstack/cinder-scheduler-0" Dec 05 17:52:12 crc kubenswrapper[4961]: I1205 17:52:12.217206 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/9188090c-6109-45b2-b63c-1656ebb2ad0e-scripts\") pod \"cinder-scheduler-0\" (UID: 
\"9188090c-6109-45b2-b63c-1656ebb2ad0e\") " pod="openstack/cinder-scheduler-0" Dec 05 17:52:12 crc kubenswrapper[4961]: I1205 17:52:12.219063 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/9188090c-6109-45b2-b63c-1656ebb2ad0e-config-data\") pod \"cinder-scheduler-0\" (UID: \"9188090c-6109-45b2-b63c-1656ebb2ad0e\") " pod="openstack/cinder-scheduler-0" Dec 05 17:52:12 crc kubenswrapper[4961]: I1205 17:52:12.219246 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/9188090c-6109-45b2-b63c-1656ebb2ad0e-combined-ca-bundle\") pod \"cinder-scheduler-0\" (UID: \"9188090c-6109-45b2-b63c-1656ebb2ad0e\") " pod="openstack/cinder-scheduler-0" Dec 05 17:52:12 crc kubenswrapper[4961]: I1205 17:52:12.219833 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/9188090c-6109-45b2-b63c-1656ebb2ad0e-config-data-custom\") pod \"cinder-scheduler-0\" (UID: \"9188090c-6109-45b2-b63c-1656ebb2ad0e\") " pod="openstack/cinder-scheduler-0" Dec 05 17:52:12 crc kubenswrapper[4961]: I1205 17:52:12.234547 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hczrp\" (UniqueName: \"kubernetes.io/projected/9188090c-6109-45b2-b63c-1656ebb2ad0e-kube-api-access-hczrp\") pod \"cinder-scheduler-0\" (UID: \"9188090c-6109-45b2-b63c-1656ebb2ad0e\") " pod="openstack/cinder-scheduler-0" Dec 05 17:52:12 crc kubenswrapper[4961]: I1205 17:52:12.265899 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/cinder-scheduler-0" Dec 05 17:52:12 crc kubenswrapper[4961]: I1205 17:52:12.730923 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/cinder-scheduler-0"] Dec 05 17:52:12 crc kubenswrapper[4961]: I1205 17:52:12.890711 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="06e09cee-db01-4318-bc75-09ac7376772d" path="/var/lib/kubelet/pods/06e09cee-db01-4318-bc75-09ac7376772d/volumes" Dec 05 17:52:12 crc kubenswrapper[4961]: I1205 17:52:12.892102 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9188090c-6109-45b2-b63c-1656ebb2ad0e","Type":"ContainerStarted","Data":"d00c548d5423e5ee6e3992df4640a0c8a21d6e00eb0741e9e9860d0b56791d02"} Dec 05 17:52:13 crc kubenswrapper[4961]: I1205 17:52:13.161300 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/cinder-api-0" Dec 05 17:52:13 crc kubenswrapper[4961]: I1205 17:52:13.786358 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/keystone-7896fbd4bd-l9rg6" Dec 05 17:52:13 crc kubenswrapper[4961]: I1205 17:52:13.899314 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9188090c-6109-45b2-b63c-1656ebb2ad0e","Type":"ContainerStarted","Data":"ec2d87f01e2e18ef7551db3a8107746ea35fd3147217e55e086121692cb2e2a4"} Dec 05 17:52:14 crc kubenswrapper[4961]: I1205 17:52:14.770704 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 05 17:52:14 crc kubenswrapper[4961]: I1205 17:52:14.772329 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 05 17:52:14 crc kubenswrapper[4961]: I1205 17:52:14.775524 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstackclient-openstackclient-dockercfg-pfzlf" Dec 05 17:52:14 crc kubenswrapper[4961]: I1205 17:52:14.775533 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-config" Dec 05 17:52:14 crc kubenswrapper[4961]: I1205 17:52:14.776157 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-config-secret" Dec 05 17:52:14 crc kubenswrapper[4961]: I1205 17:52:14.787097 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 17:52:14 crc kubenswrapper[4961]: I1205 17:52:14.926658 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/cinder-scheduler-0" event={"ID":"9188090c-6109-45b2-b63c-1656ebb2ad0e","Type":"ContainerStarted","Data":"7e6a4cc1b5dea3b8ea95e255cb1440b91a3434387f896722229afe918f24abd3"} Dec 05 17:52:14 crc kubenswrapper[4961]: I1205 17:52:14.954200 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/cinder-scheduler-0" podStartSLOduration=3.9541816069999998 podStartE2EDuration="3.954181607s" podCreationTimestamp="2025-12-05 17:52:11 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:52:14.946687703 +0000 UTC m=+1141.007838196" watchObservedRunningTime="2025-12-05 17:52:14.954181607 +0000 UTC m=+1141.015332080" Dec 05 17:52:14 crc kubenswrapper[4961]: I1205 17:52:14.973983 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zt6qm\" (UniqueName: \"kubernetes.io/projected/1632484b-b7d9-40a3-a1c0-9bf237beb70b-kube-api-access-zt6qm\") pod \"openstackclient\" (UID: \"1632484b-b7d9-40a3-a1c0-9bf237beb70b\") " pod="openstack/openstackclient" Dec 05 17:52:14 crc kubenswrapper[4961]: I1205 17:52:14.974505 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1632484b-b7d9-40a3-a1c0-9bf237beb70b-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1632484b-b7d9-40a3-a1c0-9bf237beb70b\") " pod="openstack/openstackclient" Dec 05 17:52:14 crc kubenswrapper[4961]: I1205 17:52:14.974650 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1632484b-b7d9-40a3-a1c0-9bf237beb70b-openstack-config\") pod \"openstackclient\" (UID: \"1632484b-b7d9-40a3-a1c0-9bf237beb70b\") " pod="openstack/openstackclient" Dec 05 17:52:14 crc kubenswrapper[4961]: I1205 17:52:14.974745 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1632484b-b7d9-40a3-a1c0-9bf237beb70b-openstack-config-secret\") pod \"openstackclient\" (UID: \"1632484b-b7d9-40a3-a1c0-9bf237beb70b\") " pod="openstack/openstackclient" Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.080254 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1632484b-b7d9-40a3-a1c0-9bf237beb70b-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1632484b-b7d9-40a3-a1c0-9bf237beb70b\") " pod="openstack/openstackclient" Dec 05 17:52:15 
crc kubenswrapper[4961]: I1205 17:52:15.080695 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1632484b-b7d9-40a3-a1c0-9bf237beb70b-openstack-config\") pod \"openstackclient\" (UID: \"1632484b-b7d9-40a3-a1c0-9bf237beb70b\") " pod="openstack/openstackclient" Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.080733 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1632484b-b7d9-40a3-a1c0-9bf237beb70b-openstack-config-secret\") pod \"openstackclient\" (UID: \"1632484b-b7d9-40a3-a1c0-9bf237beb70b\") " pod="openstack/openstackclient" Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.081336 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zt6qm\" (UniqueName: \"kubernetes.io/projected/1632484b-b7d9-40a3-a1c0-9bf237beb70b-kube-api-access-zt6qm\") pod \"openstackclient\" (UID: \"1632484b-b7d9-40a3-a1c0-9bf237beb70b\") " pod="openstack/openstackclient" Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.084178 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1632484b-b7d9-40a3-a1c0-9bf237beb70b-openstack-config\") pod \"openstackclient\" (UID: \"1632484b-b7d9-40a3-a1c0-9bf237beb70b\") " pod="openstack/openstackclient" Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.087381 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1632484b-b7d9-40a3-a1c0-9bf237beb70b-combined-ca-bundle\") pod \"openstackclient\" (UID: \"1632484b-b7d9-40a3-a1c0-9bf237beb70b\") " pod="openstack/openstackclient" Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.109231 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/openstackclient"] Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.109748 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1632484b-b7d9-40a3-a1c0-9bf237beb70b-openstack-config-secret\") pod \"openstackclient\" (UID: \"1632484b-b7d9-40a3-a1c0-9bf237beb70b\") " pod="openstack/openstackclient" Dec 05 17:52:15 crc kubenswrapper[4961]: E1205 17:52:15.110243 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="unmounted volumes=[kube-api-access-zt6qm openstack-config-secret], unattached volumes=[], failed to process volumes=[]: context canceled" pod="openstack/openstackclient" podUID="1632484b-b7d9-40a3-a1c0-9bf237beb70b" Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.118369 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-65bb59d746-cqlw9" podUID="01eef206-68f4-4923-8253-2a130ba0dca3" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.132395 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zt6qm\" (UniqueName: \"kubernetes.io/projected/1632484b-b7d9-40a3-a1c0-9bf237beb70b-kube-api-access-zt6qm\") pod \"openstackclient\" (UID: \"1632484b-b7d9-40a3-a1c0-9bf237beb70b\") " pod="openstack/openstackclient" Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.161385 4961 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openstack/openstackclient"] Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.172800 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/openstackclient"] Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.174405 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.183265 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.285172 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ce8ed9d8-89a0-4d15-9f08-d30111d16a2e-openstack-config\") pod \"openstackclient\" (UID: \"ce8ed9d8-89a0-4d15-9f08-d30111d16a2e\") " pod="openstack/openstackclient" Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.285371 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce8ed9d8-89a0-4d15-9f08-d30111d16a2e-combined-ca-bundle\") pod \"openstackclient\" (UID: \"ce8ed9d8-89a0-4d15-9f08-d30111d16a2e\") " pod="openstack/openstackclient" Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.285507 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpgks\" (UniqueName: \"kubernetes.io/projected/ce8ed9d8-89a0-4d15-9f08-d30111d16a2e-kube-api-access-xpgks\") pod \"openstackclient\" (UID: \"ce8ed9d8-89a0-4d15-9f08-d30111d16a2e\") " pod="openstack/openstackclient" Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.285564 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ce8ed9d8-89a0-4d15-9f08-d30111d16a2e-openstack-config-secret\") pod \"openstackclient\" (UID: \"ce8ed9d8-89a0-4d15-9f08-d30111d16a2e\") " pod="openstack/openstackclient" Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.387699 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce8ed9d8-89a0-4d15-9f08-d30111d16a2e-combined-ca-bundle\") pod \"openstackclient\" (UID: \"ce8ed9d8-89a0-4d15-9f08-d30111d16a2e\") " pod="openstack/openstackclient" Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.387873 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpgks\" (UniqueName: \"kubernetes.io/projected/ce8ed9d8-89a0-4d15-9f08-d30111d16a2e-kube-api-access-xpgks\") pod \"openstackclient\" (UID: \"ce8ed9d8-89a0-4d15-9f08-d30111d16a2e\") " pod="openstack/openstackclient" Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.387922 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ce8ed9d8-89a0-4d15-9f08-d30111d16a2e-openstack-config-secret\") pod \"openstackclient\" (UID: \"ce8ed9d8-89a0-4d15-9f08-d30111d16a2e\") " pod="openstack/openstackclient" Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.387947 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ce8ed9d8-89a0-4d15-9f08-d30111d16a2e-openstack-config\") pod \"openstackclient\" (UID: \"ce8ed9d8-89a0-4d15-9f08-d30111d16a2e\") " 
pod="openstack/openstackclient" Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.389099 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/ce8ed9d8-89a0-4d15-9f08-d30111d16a2e-openstack-config\") pod \"openstackclient\" (UID: \"ce8ed9d8-89a0-4d15-9f08-d30111d16a2e\") " pod="openstack/openstackclient" Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.392063 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce8ed9d8-89a0-4d15-9f08-d30111d16a2e-combined-ca-bundle\") pod \"openstackclient\" (UID: \"ce8ed9d8-89a0-4d15-9f08-d30111d16a2e\") " pod="openstack/openstackclient" Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.392420 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/ce8ed9d8-89a0-4d15-9f08-d30111d16a2e-openstack-config-secret\") pod \"openstackclient\" (UID: \"ce8ed9d8-89a0-4d15-9f08-d30111d16a2e\") " pod="openstack/openstackclient" Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.407344 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpgks\" (UniqueName: \"kubernetes.io/projected/ce8ed9d8-89a0-4d15-9f08-d30111d16a2e-kube-api-access-xpgks\") pod \"openstackclient\" (UID: \"ce8ed9d8-89a0-4d15-9f08-d30111d16a2e\") " pod="openstack/openstackclient" Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.527552 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.937605 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.944766 4961 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="1632484b-b7d9-40a3-a1c0-9bf237beb70b" podUID="ce8ed9d8-89a0-4d15-9f08-d30111d16a2e" Dec 05 17:52:15 crc kubenswrapper[4961]: I1205 17:52:15.948351 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/openstackclient" Dec 05 17:52:16 crc kubenswrapper[4961]: I1205 17:52:16.000874 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/openstackclient"] Dec 05 17:52:16 crc kubenswrapper[4961]: I1205 17:52:16.101555 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1632484b-b7d9-40a3-a1c0-9bf237beb70b-openstack-config-secret\") pod \"1632484b-b7d9-40a3-a1c0-9bf237beb70b\" (UID: \"1632484b-b7d9-40a3-a1c0-9bf237beb70b\") " Dec 05 17:52:16 crc kubenswrapper[4961]: I1205 17:52:16.101667 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1632484b-b7d9-40a3-a1c0-9bf237beb70b-openstack-config\") pod \"1632484b-b7d9-40a3-a1c0-9bf237beb70b\" (UID: \"1632484b-b7d9-40a3-a1c0-9bf237beb70b\") " Dec 05 17:52:16 crc kubenswrapper[4961]: I1205 17:52:16.101802 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zt6qm\" (UniqueName: \"kubernetes.io/projected/1632484b-b7d9-40a3-a1c0-9bf237beb70b-kube-api-access-zt6qm\") pod \"1632484b-b7d9-40a3-a1c0-9bf237beb70b\" (UID: \"1632484b-b7d9-40a3-a1c0-9bf237beb70b\") " Dec 05 17:52:16 crc kubenswrapper[4961]: I1205 17:52:16.101879 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1632484b-b7d9-40a3-a1c0-9bf237beb70b-combined-ca-bundle\") pod \"1632484b-b7d9-40a3-a1c0-9bf237beb70b\" (UID: \"1632484b-b7d9-40a3-a1c0-9bf237beb70b\") " Dec 05 17:52:16 crc kubenswrapper[4961]: I1205 17:52:16.102218 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1632484b-b7d9-40a3-a1c0-9bf237beb70b-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "1632484b-b7d9-40a3-a1c0-9bf237beb70b" (UID: "1632484b-b7d9-40a3-a1c0-9bf237beb70b"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:52:16 crc kubenswrapper[4961]: I1205 17:52:16.102374 4961 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/1632484b-b7d9-40a3-a1c0-9bf237beb70b-openstack-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:16 crc kubenswrapper[4961]: I1205 17:52:16.113008 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1632484b-b7d9-40a3-a1c0-9bf237beb70b-kube-api-access-zt6qm" (OuterVolumeSpecName: "kube-api-access-zt6qm") pod "1632484b-b7d9-40a3-a1c0-9bf237beb70b" (UID: "1632484b-b7d9-40a3-a1c0-9bf237beb70b"). InnerVolumeSpecName "kube-api-access-zt6qm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:52:16 crc kubenswrapper[4961]: I1205 17:52:16.125053 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1632484b-b7d9-40a3-a1c0-9bf237beb70b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1632484b-b7d9-40a3-a1c0-9bf237beb70b" (UID: "1632484b-b7d9-40a3-a1c0-9bf237beb70b"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:16 crc kubenswrapper[4961]: I1205 17:52:16.125198 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1632484b-b7d9-40a3-a1c0-9bf237beb70b-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "1632484b-b7d9-40a3-a1c0-9bf237beb70b" (UID: "1632484b-b7d9-40a3-a1c0-9bf237beb70b"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:16 crc kubenswrapper[4961]: I1205 17:52:16.204558 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zt6qm\" (UniqueName: \"kubernetes.io/projected/1632484b-b7d9-40a3-a1c0-9bf237beb70b-kube-api-access-zt6qm\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:16 crc kubenswrapper[4961]: I1205 17:52:16.204605 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1632484b-b7d9-40a3-a1c0-9bf237beb70b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:16 crc kubenswrapper[4961]: I1205 17:52:16.204616 4961 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/1632484b-b7d9-40a3-a1c0-9bf237beb70b-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:16 crc kubenswrapper[4961]: I1205 17:52:16.879632 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1632484b-b7d9-40a3-a1c0-9bf237beb70b" path="/var/lib/kubelet/pods/1632484b-b7d9-40a3-a1c0-9bf237beb70b/volumes" Dec 05 17:52:16 crc kubenswrapper[4961]: I1205 17:52:16.949830 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/openstackclient" Dec 05 17:52:16 crc kubenswrapper[4961]: I1205 17:52:16.949902 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"ce8ed9d8-89a0-4d15-9f08-d30111d16a2e","Type":"ContainerStarted","Data":"2a67ca4b803be4e0e830eb4c49e0ca0fa2aca9cf1d36f4298198a7c4ee28c652"} Dec 05 17:52:16 crc kubenswrapper[4961]: I1205 17:52:16.960832 4961 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openstack/openstackclient" oldPodUID="1632484b-b7d9-40a3-a1c0-9bf237beb70b" podUID="ce8ed9d8-89a0-4d15-9f08-d30111d16a2e" Dec 05 17:52:17 crc kubenswrapper[4961]: I1205 17:52:17.266643 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/cinder-scheduler-0" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.647554 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/swift-proxy-5d77d54f6c-4pccf"] Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.667239 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.672110 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"swift-proxy-config-data" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.672651 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-public-svc" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.673543 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-swift-internal-svc" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.684482 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5d77d54f6c-4pccf"] Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.768276 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l6jfp\" (UniqueName: \"kubernetes.io/projected/dcca830e-9231-4c67-b5fa-669102d7ecc8-kube-api-access-l6jfp\") pod \"swift-proxy-5d77d54f6c-4pccf\" (UID: \"dcca830e-9231-4c67-b5fa-669102d7ecc8\") " pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.768315 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dcca830e-9231-4c67-b5fa-669102d7ecc8-config-data\") pod \"swift-proxy-5d77d54f6c-4pccf\" (UID: \"dcca830e-9231-4c67-b5fa-669102d7ecc8\") " pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.768355 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dcca830e-9231-4c67-b5fa-669102d7ecc8-internal-tls-certs\") pod \"swift-proxy-5d77d54f6c-4pccf\" (UID: \"dcca830e-9231-4c67-b5fa-669102d7ecc8\") " pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.768538 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dcca830e-9231-4c67-b5fa-669102d7ecc8-log-httpd\") pod \"swift-proxy-5d77d54f6c-4pccf\" (UID: \"dcca830e-9231-4c67-b5fa-669102d7ecc8\") " pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.768700 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dcca830e-9231-4c67-b5fa-669102d7ecc8-run-httpd\") pod \"swift-proxy-5d77d54f6c-4pccf\" (UID: \"dcca830e-9231-4c67-b5fa-669102d7ecc8\") " pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.768787 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/dcca830e-9231-4c67-b5fa-669102d7ecc8-etc-swift\") pod \"swift-proxy-5d77d54f6c-4pccf\" (UID: \"dcca830e-9231-4c67-b5fa-669102d7ecc8\") " pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.768849 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dcca830e-9231-4c67-b5fa-669102d7ecc8-public-tls-certs\") pod \"swift-proxy-5d77d54f6c-4pccf\" (UID: \"dcca830e-9231-4c67-b5fa-669102d7ecc8\") " 
pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.769078 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcca830e-9231-4c67-b5fa-669102d7ecc8-combined-ca-bundle\") pod \"swift-proxy-5d77d54f6c-4pccf\" (UID: \"dcca830e-9231-4c67-b5fa-669102d7ecc8\") " pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.884229 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l6jfp\" (UniqueName: \"kubernetes.io/projected/dcca830e-9231-4c67-b5fa-669102d7ecc8-kube-api-access-l6jfp\") pod \"swift-proxy-5d77d54f6c-4pccf\" (UID: \"dcca830e-9231-4c67-b5fa-669102d7ecc8\") " pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.884311 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dcca830e-9231-4c67-b5fa-669102d7ecc8-config-data\") pod \"swift-proxy-5d77d54f6c-4pccf\" (UID: \"dcca830e-9231-4c67-b5fa-669102d7ecc8\") " pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.884433 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dcca830e-9231-4c67-b5fa-669102d7ecc8-log-httpd\") pod \"swift-proxy-5d77d54f6c-4pccf\" (UID: \"dcca830e-9231-4c67-b5fa-669102d7ecc8\") " pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.884465 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dcca830e-9231-4c67-b5fa-669102d7ecc8-internal-tls-certs\") pod \"swift-proxy-5d77d54f6c-4pccf\" (UID: \"dcca830e-9231-4c67-b5fa-669102d7ecc8\") " pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.884521 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dcca830e-9231-4c67-b5fa-669102d7ecc8-run-httpd\") pod \"swift-proxy-5d77d54f6c-4pccf\" (UID: \"dcca830e-9231-4c67-b5fa-669102d7ecc8\") " pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.884609 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/dcca830e-9231-4c67-b5fa-669102d7ecc8-etc-swift\") pod \"swift-proxy-5d77d54f6c-4pccf\" (UID: \"dcca830e-9231-4c67-b5fa-669102d7ecc8\") " pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.884656 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dcca830e-9231-4c67-b5fa-669102d7ecc8-public-tls-certs\") pod \"swift-proxy-5d77d54f6c-4pccf\" (UID: \"dcca830e-9231-4c67-b5fa-669102d7ecc8\") " pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.884852 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcca830e-9231-4c67-b5fa-669102d7ecc8-combined-ca-bundle\") pod \"swift-proxy-5d77d54f6c-4pccf\" (UID: \"dcca830e-9231-4c67-b5fa-669102d7ecc8\") " 
pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.887871 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dcca830e-9231-4c67-b5fa-669102d7ecc8-log-httpd\") pod \"swift-proxy-5d77d54f6c-4pccf\" (UID: \"dcca830e-9231-4c67-b5fa-669102d7ecc8\") " pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.887948 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/dcca830e-9231-4c67-b5fa-669102d7ecc8-run-httpd\") pod \"swift-proxy-5d77d54f6c-4pccf\" (UID: \"dcca830e-9231-4c67-b5fa-669102d7ecc8\") " pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.894946 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/dcca830e-9231-4c67-b5fa-669102d7ecc8-combined-ca-bundle\") pod \"swift-proxy-5d77d54f6c-4pccf\" (UID: \"dcca830e-9231-4c67-b5fa-669102d7ecc8\") " pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.895273 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-swift\" (UniqueName: \"kubernetes.io/projected/dcca830e-9231-4c67-b5fa-669102d7ecc8-etc-swift\") pod \"swift-proxy-5d77d54f6c-4pccf\" (UID: \"dcca830e-9231-4c67-b5fa-669102d7ecc8\") " pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.896800 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/dcca830e-9231-4c67-b5fa-669102d7ecc8-config-data\") pod \"swift-proxy-5d77d54f6c-4pccf\" (UID: \"dcca830e-9231-4c67-b5fa-669102d7ecc8\") " pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.897861 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/dcca830e-9231-4c67-b5fa-669102d7ecc8-public-tls-certs\") pod \"swift-proxy-5d77d54f6c-4pccf\" (UID: \"dcca830e-9231-4c67-b5fa-669102d7ecc8\") " pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.903605 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/dcca830e-9231-4c67-b5fa-669102d7ecc8-internal-tls-certs\") pod \"swift-proxy-5d77d54f6c-4pccf\" (UID: \"dcca830e-9231-4c67-b5fa-669102d7ecc8\") " pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:18 crc kubenswrapper[4961]: I1205 17:52:18.911136 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l6jfp\" (UniqueName: \"kubernetes.io/projected/dcca830e-9231-4c67-b5fa-669102d7ecc8-kube-api-access-l6jfp\") pod \"swift-proxy-5d77d54f6c-4pccf\" (UID: \"dcca830e-9231-4c67-b5fa-669102d7ecc8\") " pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:19 crc kubenswrapper[4961]: I1205 17:52:19.018413 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:19 crc kubenswrapper[4961]: I1205 17:52:19.584687 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:52:19 crc kubenswrapper[4961]: I1205 17:52:19.585465 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="083ba775-2cc0-4528-9712-27574bbf5df7" containerName="ceilometer-central-agent" containerID="cri-o://7d2e1d7a1ef48f7abf6b14ad0ca0b52ad4571421a23555acfd6586457b70174e" gracePeriod=30 Dec 05 17:52:19 crc kubenswrapper[4961]: I1205 17:52:19.585576 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="083ba775-2cc0-4528-9712-27574bbf5df7" containerName="ceilometer-notification-agent" containerID="cri-o://72f0c5cc6585e3ff2f634480cd51db4ec1a91ccc8e98fc69da63cbb0a669335f" gracePeriod=30 Dec 05 17:52:19 crc kubenswrapper[4961]: I1205 17:52:19.585728 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="083ba775-2cc0-4528-9712-27574bbf5df7" containerName="proxy-httpd" containerID="cri-o://3b20f58771e336b947ebe3957ceaae39aa08b59631e2240f21effe3cb0d2b50c" gracePeriod=30 Dec 05 17:52:19 crc kubenswrapper[4961]: I1205 17:52:19.585829 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="083ba775-2cc0-4528-9712-27574bbf5df7" containerName="sg-core" containerID="cri-o://ffd086d664dc56d275e927d1d95a64e5f2c85e9ec384e7902e8b15d3a9fec9a2" gracePeriod=30 Dec 05 17:52:19 crc kubenswrapper[4961]: I1205 17:52:19.601637 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="083ba775-2cc0-4528-9712-27574bbf5df7" containerName="proxy-httpd" probeResult="failure" output="Get \"http://10.217.0.165:3000/\": EOF" Dec 05 17:52:19 crc kubenswrapper[4961]: I1205 17:52:19.645223 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/swift-proxy-5d77d54f6c-4pccf"] Dec 05 17:52:20 crc kubenswrapper[4961]: I1205 17:52:20.009342 4961 generic.go:334] "Generic (PLEG): container finished" podID="083ba775-2cc0-4528-9712-27574bbf5df7" containerID="3b20f58771e336b947ebe3957ceaae39aa08b59631e2240f21effe3cb0d2b50c" exitCode=0 Dec 05 17:52:20 crc kubenswrapper[4961]: I1205 17:52:20.009741 4961 generic.go:334] "Generic (PLEG): container finished" podID="083ba775-2cc0-4528-9712-27574bbf5df7" containerID="ffd086d664dc56d275e927d1d95a64e5f2c85e9ec384e7902e8b15d3a9fec9a2" exitCode=2 Dec 05 17:52:20 crc kubenswrapper[4961]: I1205 17:52:20.009808 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"083ba775-2cc0-4528-9712-27574bbf5df7","Type":"ContainerDied","Data":"3b20f58771e336b947ebe3957ceaae39aa08b59631e2240f21effe3cb0d2b50c"} Dec 05 17:52:20 crc kubenswrapper[4961]: I1205 17:52:20.009836 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"083ba775-2cc0-4528-9712-27574bbf5df7","Type":"ContainerDied","Data":"ffd086d664dc56d275e927d1d95a64e5f2c85e9ec384e7902e8b15d3a9fec9a2"} Dec 05 17:52:20 crc kubenswrapper[4961]: I1205 17:52:20.025831 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5d77d54f6c-4pccf" event={"ID":"dcca830e-9231-4c67-b5fa-669102d7ecc8","Type":"ContainerStarted","Data":"2ce3cce7d079567011a36f21e10d515139d1bcc2a6750d897201a210865d4dd4"} Dec 05 17:52:20 crc kubenswrapper[4961]: I1205 
17:52:20.025923 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5d77d54f6c-4pccf" event={"ID":"dcca830e-9231-4c67-b5fa-669102d7ecc8","Type":"ContainerStarted","Data":"f89711c2fcc452a475f27688b581a58155001fb1a0b0bf8a1a27989e81900873"} Dec 05 17:52:21 crc kubenswrapper[4961]: I1205 17:52:21.039596 4961 generic.go:334] "Generic (PLEG): container finished" podID="083ba775-2cc0-4528-9712-27574bbf5df7" containerID="7d2e1d7a1ef48f7abf6b14ad0ca0b52ad4571421a23555acfd6586457b70174e" exitCode=0 Dec 05 17:52:21 crc kubenswrapper[4961]: I1205 17:52:21.039704 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"083ba775-2cc0-4528-9712-27574bbf5df7","Type":"ContainerDied","Data":"7d2e1d7a1ef48f7abf6b14ad0ca0b52ad4571421a23555acfd6586457b70174e"} Dec 05 17:52:21 crc kubenswrapper[4961]: I1205 17:52:21.044190 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/swift-proxy-5d77d54f6c-4pccf" event={"ID":"dcca830e-9231-4c67-b5fa-669102d7ecc8","Type":"ContainerStarted","Data":"48d437fcf276aaf318761bd9b36b94a25f6ed51081cc53453400f5bb6b130982"} Dec 05 17:52:21 crc kubenswrapper[4961]: I1205 17:52:21.044500 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:21 crc kubenswrapper[4961]: I1205 17:52:21.044532 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:21 crc kubenswrapper[4961]: I1205 17:52:21.083429 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/swift-proxy-5d77d54f6c-4pccf" podStartSLOduration=3.083403923 podStartE2EDuration="3.083403923s" podCreationTimestamp="2025-12-05 17:52:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:52:21.082030189 +0000 UTC m=+1147.143180682" watchObservedRunningTime="2025-12-05 17:52:21.083403923 +0000 UTC m=+1147.144554396" Dec 05 17:52:22 crc kubenswrapper[4961]: I1205 17:52:22.221288 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:52:22 crc kubenswrapper[4961]: I1205 17:52:22.222213 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ce20cb74-34b1-4745-92c7-2a4fdda5b0d8" containerName="glance-log" containerID="cri-o://1236fafbb9e2e5cc64b3b0cf733f5e0bcc978e52c4a17a565c5198715611d214" gracePeriod=30 Dec 05 17:52:22 crc kubenswrapper[4961]: I1205 17:52:22.222451 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-internal-api-0" podUID="ce20cb74-34b1-4745-92c7-2a4fdda5b0d8" containerName="glance-httpd" containerID="cri-o://140e97691d78b8e3d30753328776e5fc9b2fb76022aa38ae6a0f7b0f6f7c298c" gracePeriod=30 Dec 05 17:52:22 crc kubenswrapper[4961]: I1205 17:52:22.560173 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/cinder-scheduler-0" Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.071504 4961 generic.go:334] "Generic (PLEG): container finished" podID="083ba775-2cc0-4528-9712-27574bbf5df7" containerID="72f0c5cc6585e3ff2f634480cd51db4ec1a91ccc8e98fc69da63cbb0a669335f" exitCode=0 Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.071608 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" 
event={"ID":"083ba775-2cc0-4528-9712-27574bbf5df7","Type":"ContainerDied","Data":"72f0c5cc6585e3ff2f634480cd51db4ec1a91ccc8e98fc69da63cbb0a669335f"} Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.075162 4961 generic.go:334] "Generic (PLEG): container finished" podID="ce20cb74-34b1-4745-92c7-2a4fdda5b0d8" containerID="1236fafbb9e2e5cc64b3b0cf733f5e0bcc978e52c4a17a565c5198715611d214" exitCode=143 Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.075205 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8","Type":"ContainerDied","Data":"1236fafbb9e2e5cc64b3b0cf733f5e0bcc978e52c4a17a565c5198715611d214"} Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.706076 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-db-create-8zqnt"] Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.707898 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-8zqnt" Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.725828 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-8zqnt"] Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.728382 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n74l7\" (UniqueName: \"kubernetes.io/projected/f1728b96-e1ba-4e27-a120-6f6fd3e85437-kube-api-access-n74l7\") pod \"nova-api-db-create-8zqnt\" (UID: \"f1728b96-e1ba-4e27-a120-6f6fd3e85437\") " pod="openstack/nova-api-db-create-8zqnt" Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.728411 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f1728b96-e1ba-4e27-a120-6f6fd3e85437-operator-scripts\") pod \"nova-api-db-create-8zqnt\" (UID: \"f1728b96-e1ba-4e27-a120-6f6fd3e85437\") " pod="openstack/nova-api-db-create-8zqnt" Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.786114 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-db-create-bmgv7"] Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.788217 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-db-create-bmgv7" Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.804137 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-bmgv7"] Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.824847 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.825241 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="792b07a4-55ec-4870-9e27-3b6e4d250b67" containerName="glance-log" containerID="cri-o://26653f35aae08d87656ae9a104b448a17c8dfaa8f58107e0edc9239f4a5ad9ad" gracePeriod=30 Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.825479 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/glance-default-external-api-0" podUID="792b07a4-55ec-4870-9e27-3b6e4d250b67" containerName="glance-httpd" containerID="cri-o://eb34cea83c0ee346263a0b71ab34ca9f157690a05f117f19a676a5ef7ed49981" gracePeriod=30 Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.831369 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n74l7\" (UniqueName: \"kubernetes.io/projected/f1728b96-e1ba-4e27-a120-6f6fd3e85437-kube-api-access-n74l7\") pod \"nova-api-db-create-8zqnt\" (UID: \"f1728b96-e1ba-4e27-a120-6f6fd3e85437\") " pod="openstack/nova-api-db-create-8zqnt" Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.831418 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f1728b96-e1ba-4e27-a120-6f6fd3e85437-operator-scripts\") pod \"nova-api-db-create-8zqnt\" (UID: \"f1728b96-e1ba-4e27-a120-6f6fd3e85437\") " pod="openstack/nova-api-db-create-8zqnt" Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.838181 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f1728b96-e1ba-4e27-a120-6f6fd3e85437-operator-scripts\") pod \"nova-api-db-create-8zqnt\" (UID: \"f1728b96-e1ba-4e27-a120-6f6fd3e85437\") " pod="openstack/nova-api-db-create-8zqnt" Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.845084 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-6c6c-account-create-update-hsnmn"] Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.856705 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-6c6c-account-create-update-hsnmn" Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.858063 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-6c6c-account-create-update-hsnmn"] Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.863284 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-db-secret" Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.889408 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n74l7\" (UniqueName: \"kubernetes.io/projected/f1728b96-e1ba-4e27-a120-6f6fd3e85437-kube-api-access-n74l7\") pod \"nova-api-db-create-8zqnt\" (UID: \"f1728b96-e1ba-4e27-a120-6f6fd3e85437\") " pod="openstack/nova-api-db-create-8zqnt" Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.898860 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-db-create-7gphb"] Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.900482 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-7gphb" Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.908763 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-db-create-7gphb"] Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.934917 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tktqf\" (UniqueName: \"kubernetes.io/projected/e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc-kube-api-access-tktqf\") pod \"nova-cell0-db-create-bmgv7\" (UID: \"e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc\") " pod="openstack/nova-cell0-db-create-bmgv7" Dec 05 17:52:23 crc kubenswrapper[4961]: I1205 17:52:23.935051 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc-operator-scripts\") pod \"nova-cell0-db-create-bmgv7\" (UID: \"e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc\") " pod="openstack/nova-cell0-db-create-bmgv7" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.036554 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-8zqnt" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.037156 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/763814d3-5adb-4523-8baa-1ca7f7ecc86b-operator-scripts\") pod \"nova-cell1-db-create-7gphb\" (UID: \"763814d3-5adb-4523-8baa-1ca7f7ecc86b\") " pod="openstack/nova-cell1-db-create-7gphb" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.037326 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m96ps\" (UniqueName: \"kubernetes.io/projected/763814d3-5adb-4523-8baa-1ca7f7ecc86b-kube-api-access-m96ps\") pod \"nova-cell1-db-create-7gphb\" (UID: \"763814d3-5adb-4523-8baa-1ca7f7ecc86b\") " pod="openstack/nova-cell1-db-create-7gphb" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.037408 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/276d90d2-7b34-45be-a492-89dc67929102-operator-scripts\") pod \"nova-api-6c6c-account-create-update-hsnmn\" (UID: \"276d90d2-7b34-45be-a492-89dc67929102\") " pod="openstack/nova-api-6c6c-account-create-update-hsnmn" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.037440 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jm7nk\" (UniqueName: \"kubernetes.io/projected/276d90d2-7b34-45be-a492-89dc67929102-kube-api-access-jm7nk\") pod \"nova-api-6c6c-account-create-update-hsnmn\" (UID: \"276d90d2-7b34-45be-a492-89dc67929102\") " pod="openstack/nova-api-6c6c-account-create-update-hsnmn" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.037547 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tktqf\" (UniqueName: \"kubernetes.io/projected/e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc-kube-api-access-tktqf\") pod \"nova-cell0-db-create-bmgv7\" (UID: \"e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc\") " pod="openstack/nova-cell0-db-create-bmgv7" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.037659 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc-operator-scripts\") pod \"nova-cell0-db-create-bmgv7\" (UID: \"e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc\") " pod="openstack/nova-cell0-db-create-bmgv7" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.040090 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc-operator-scripts\") pod \"nova-cell0-db-create-bmgv7\" (UID: \"e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc\") " pod="openstack/nova-cell0-db-create-bmgv7" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.065087 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tktqf\" (UniqueName: \"kubernetes.io/projected/e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc-kube-api-access-tktqf\") pod \"nova-cell0-db-create-bmgv7\" (UID: \"e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc\") " pod="openstack/nova-cell0-db-create-bmgv7" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.110670 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-5809-account-create-update-5n4c9"] Dec 05 17:52:24 crc kubenswrapper[4961]: 
I1205 17:52:24.112080 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-5809-account-create-update-5n4c9" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.118649 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-db-secret" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.120854 4961 generic.go:334] "Generic (PLEG): container finished" podID="792b07a4-55ec-4870-9e27-3b6e4d250b67" containerID="26653f35aae08d87656ae9a104b448a17c8dfaa8f58107e0edc9239f4a5ad9ad" exitCode=143 Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.120907 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"792b07a4-55ec-4870-9e27-3b6e4d250b67","Type":"ContainerDied","Data":"26653f35aae08d87656ae9a104b448a17c8dfaa8f58107e0edc9239f4a5ad9ad"} Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.124005 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-bmgv7" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.130791 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-5809-account-create-update-5n4c9"] Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.141075 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/276d90d2-7b34-45be-a492-89dc67929102-operator-scripts\") pod \"nova-api-6c6c-account-create-update-hsnmn\" (UID: \"276d90d2-7b34-45be-a492-89dc67929102\") " pod="openstack/nova-api-6c6c-account-create-update-hsnmn" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.141144 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jm7nk\" (UniqueName: \"kubernetes.io/projected/276d90d2-7b34-45be-a492-89dc67929102-kube-api-access-jm7nk\") pod \"nova-api-6c6c-account-create-update-hsnmn\" (UID: \"276d90d2-7b34-45be-a492-89dc67929102\") " pod="openstack/nova-api-6c6c-account-create-update-hsnmn" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.141268 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/763814d3-5adb-4523-8baa-1ca7f7ecc86b-operator-scripts\") pod \"nova-cell1-db-create-7gphb\" (UID: \"763814d3-5adb-4523-8baa-1ca7f7ecc86b\") " pod="openstack/nova-cell1-db-create-7gphb" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.141323 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m96ps\" (UniqueName: \"kubernetes.io/projected/763814d3-5adb-4523-8baa-1ca7f7ecc86b-kube-api-access-m96ps\") pod \"nova-cell1-db-create-7gphb\" (UID: \"763814d3-5adb-4523-8baa-1ca7f7ecc86b\") " pod="openstack/nova-cell1-db-create-7gphb" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.143029 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/276d90d2-7b34-45be-a492-89dc67929102-operator-scripts\") pod \"nova-api-6c6c-account-create-update-hsnmn\" (UID: \"276d90d2-7b34-45be-a492-89dc67929102\") " pod="openstack/nova-api-6c6c-account-create-update-hsnmn" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.143860 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/763814d3-5adb-4523-8baa-1ca7f7ecc86b-operator-scripts\") 
pod \"nova-cell1-db-create-7gphb\" (UID: \"763814d3-5adb-4523-8baa-1ca7f7ecc86b\") " pod="openstack/nova-cell1-db-create-7gphb" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.178852 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jm7nk\" (UniqueName: \"kubernetes.io/projected/276d90d2-7b34-45be-a492-89dc67929102-kube-api-access-jm7nk\") pod \"nova-api-6c6c-account-create-update-hsnmn\" (UID: \"276d90d2-7b34-45be-a492-89dc67929102\") " pod="openstack/nova-api-6c6c-account-create-update-hsnmn" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.194526 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m96ps\" (UniqueName: \"kubernetes.io/projected/763814d3-5adb-4523-8baa-1ca7f7ecc86b-kube-api-access-m96ps\") pod \"nova-cell1-db-create-7gphb\" (UID: \"763814d3-5adb-4523-8baa-1ca7f7ecc86b\") " pod="openstack/nova-cell1-db-create-7gphb" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.222352 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-f95f-account-create-update-bkjnh"] Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.223609 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-f95f-account-create-update-bkjnh" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.230437 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-db-secret" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.242699 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-86lfc\" (UniqueName: \"kubernetes.io/projected/9ed81d6a-5172-4362-b7da-02f4552cb45b-kube-api-access-86lfc\") pod \"nova-cell0-5809-account-create-update-5n4c9\" (UID: \"9ed81d6a-5172-4362-b7da-02f4552cb45b\") " pod="openstack/nova-cell0-5809-account-create-update-5n4c9" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.242870 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ed81d6a-5172-4362-b7da-02f4552cb45b-operator-scripts\") pod \"nova-cell0-5809-account-create-update-5n4c9\" (UID: \"9ed81d6a-5172-4362-b7da-02f4552cb45b\") " pod="openstack/nova-cell0-5809-account-create-update-5n4c9" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.246080 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-6c6c-account-create-update-hsnmn" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.256967 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-f95f-account-create-update-bkjnh"] Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.273936 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.292246 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-7gphb" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.344525 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-86lfc\" (UniqueName: \"kubernetes.io/projected/9ed81d6a-5172-4362-b7da-02f4552cb45b-kube-api-access-86lfc\") pod \"nova-cell0-5809-account-create-update-5n4c9\" (UID: \"9ed81d6a-5172-4362-b7da-02f4552cb45b\") " pod="openstack/nova-cell0-5809-account-create-update-5n4c9" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.344943 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ed81d6a-5172-4362-b7da-02f4552cb45b-operator-scripts\") pod \"nova-cell0-5809-account-create-update-5n4c9\" (UID: \"9ed81d6a-5172-4362-b7da-02f4552cb45b\") " pod="openstack/nova-cell0-5809-account-create-update-5n4c9" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.345070 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56-operator-scripts\") pod \"nova-cell1-f95f-account-create-update-bkjnh\" (UID: \"79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56\") " pod="openstack/nova-cell1-f95f-account-create-update-bkjnh" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.345174 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9c5vl\" (UniqueName: \"kubernetes.io/projected/79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56-kube-api-access-9c5vl\") pod \"nova-cell1-f95f-account-create-update-bkjnh\" (UID: \"79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56\") " pod="openstack/nova-cell1-f95f-account-create-update-bkjnh" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.347016 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ed81d6a-5172-4362-b7da-02f4552cb45b-operator-scripts\") pod \"nova-cell0-5809-account-create-update-5n4c9\" (UID: \"9ed81d6a-5172-4362-b7da-02f4552cb45b\") " pod="openstack/nova-cell0-5809-account-create-update-5n4c9" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.369408 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-86lfc\" (UniqueName: \"kubernetes.io/projected/9ed81d6a-5172-4362-b7da-02f4552cb45b-kube-api-access-86lfc\") pod \"nova-cell0-5809-account-create-update-5n4c9\" (UID: \"9ed81d6a-5172-4362-b7da-02f4552cb45b\") " pod="openstack/nova-cell0-5809-account-create-update-5n4c9" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.432845 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-5809-account-create-update-5n4c9" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.447942 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56-operator-scripts\") pod \"nova-cell1-f95f-account-create-update-bkjnh\" (UID: \"79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56\") " pod="openstack/nova-cell1-f95f-account-create-update-bkjnh" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.448036 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9c5vl\" (UniqueName: \"kubernetes.io/projected/79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56-kube-api-access-9c5vl\") pod \"nova-cell1-f95f-account-create-update-bkjnh\" (UID: \"79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56\") " pod="openstack/nova-cell1-f95f-account-create-update-bkjnh" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.449181 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56-operator-scripts\") pod \"nova-cell1-f95f-account-create-update-bkjnh\" (UID: \"79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56\") " pod="openstack/nova-cell1-f95f-account-create-update-bkjnh" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.466422 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9c5vl\" (UniqueName: \"kubernetes.io/projected/79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56-kube-api-access-9c5vl\") pod \"nova-cell1-f95f-account-create-update-bkjnh\" (UID: \"79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56\") " pod="openstack/nova-cell1-f95f-account-create-update-bkjnh" Dec 05 17:52:24 crc kubenswrapper[4961]: I1205 17:52:24.628078 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-f95f-account-create-update-bkjnh" Dec 05 17:52:25 crc kubenswrapper[4961]: I1205 17:52:25.120796 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/horizon-65bb59d746-cqlw9" podUID="01eef206-68f4-4923-8253-2a130ba0dca3" containerName="horizon" probeResult="failure" output="Get \"https://10.217.0.145:8443/dashboard/auth/login/?next=/dashboard/\": dial tcp 10.217.0.145:8443: connect: connection refused" Dec 05 17:52:25 crc kubenswrapper[4961]: I1205 17:52:25.120972 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:52:26 crc kubenswrapper[4961]: I1205 17:52:26.158103 4961 generic.go:334] "Generic (PLEG): container finished" podID="ce20cb74-34b1-4745-92c7-2a4fdda5b0d8" containerID="140e97691d78b8e3d30753328776e5fc9b2fb76022aa38ae6a0f7b0f6f7c298c" exitCode=0 Dec 05 17:52:26 crc kubenswrapper[4961]: I1205 17:52:26.158177 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8","Type":"ContainerDied","Data":"140e97691d78b8e3d30753328776e5fc9b2fb76022aa38ae6a0f7b0f6f7c298c"} Dec 05 17:52:27 crc kubenswrapper[4961]: I1205 17:52:27.180908 4961 generic.go:334] "Generic (PLEG): container finished" podID="792b07a4-55ec-4870-9e27-3b6e4d250b67" containerID="eb34cea83c0ee346263a0b71ab34ca9f157690a05f117f19a676a5ef7ed49981" exitCode=0 Dec 05 17:52:27 crc kubenswrapper[4961]: I1205 17:52:27.180955 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"792b07a4-55ec-4870-9e27-3b6e4d250b67","Type":"ContainerDied","Data":"eb34cea83c0ee346263a0b71ab34ca9f157690a05f117f19a676a5ef7ed49981"} Dec 05 17:52:27 crc kubenswrapper[4961]: I1205 17:52:27.246522 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:52:27 crc kubenswrapper[4961]: I1205 17:52:27.246587 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.049277 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.222242 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"083ba775-2cc0-4528-9712-27574bbf5df7","Type":"ContainerDied","Data":"838b5bc176ccb3ffbd0846ee4a5847ea01b289b44eb1475a202173ddb76d7329"} Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.222849 4961 scope.go:117] "RemoveContainer" containerID="3b20f58771e336b947ebe3957ceaae39aa08b59631e2240f21effe3cb0d2b50c" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.223386 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.243620 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/083ba775-2cc0-4528-9712-27574bbf5df7-log-httpd\") pod \"083ba775-2cc0-4528-9712-27574bbf5df7\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.243691 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/083ba775-2cc0-4528-9712-27574bbf5df7-run-httpd\") pod \"083ba775-2cc0-4528-9712-27574bbf5df7\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.243711 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/083ba775-2cc0-4528-9712-27574bbf5df7-sg-core-conf-yaml\") pod \"083ba775-2cc0-4528-9712-27574bbf5df7\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.243941 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qdvz7\" (UniqueName: \"kubernetes.io/projected/083ba775-2cc0-4528-9712-27574bbf5df7-kube-api-access-qdvz7\") pod \"083ba775-2cc0-4528-9712-27574bbf5df7\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.244034 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/083ba775-2cc0-4528-9712-27574bbf5df7-combined-ca-bundle\") pod \"083ba775-2cc0-4528-9712-27574bbf5df7\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.244168 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/083ba775-2cc0-4528-9712-27574bbf5df7-scripts\") pod \"083ba775-2cc0-4528-9712-27574bbf5df7\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.244225 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/083ba775-2cc0-4528-9712-27574bbf5df7-config-data\") pod \"083ba775-2cc0-4528-9712-27574bbf5df7\" (UID: \"083ba775-2cc0-4528-9712-27574bbf5df7\") " Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.244422 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/083ba775-2cc0-4528-9712-27574bbf5df7-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "083ba775-2cc0-4528-9712-27574bbf5df7" (UID: "083ba775-2cc0-4528-9712-27574bbf5df7"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.244443 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/083ba775-2cc0-4528-9712-27574bbf5df7-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "083ba775-2cc0-4528-9712-27574bbf5df7" (UID: "083ba775-2cc0-4528-9712-27574bbf5df7"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.244767 4961 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/083ba775-2cc0-4528-9712-27574bbf5df7-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.244845 4961 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/083ba775-2cc0-4528-9712-27574bbf5df7-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.254470 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/083ba775-2cc0-4528-9712-27574bbf5df7-scripts" (OuterVolumeSpecName: "scripts") pod "083ba775-2cc0-4528-9712-27574bbf5df7" (UID: "083ba775-2cc0-4528-9712-27574bbf5df7"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.255188 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/083ba775-2cc0-4528-9712-27574bbf5df7-kube-api-access-qdvz7" (OuterVolumeSpecName: "kube-api-access-qdvz7") pod "083ba775-2cc0-4528-9712-27574bbf5df7" (UID: "083ba775-2cc0-4528-9712-27574bbf5df7"). InnerVolumeSpecName "kube-api-access-qdvz7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.264998 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.291931 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/083ba775-2cc0-4528-9712-27574bbf5df7-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "083ba775-2cc0-4528-9712-27574bbf5df7" (UID: "083ba775-2cc0-4528-9712-27574bbf5df7"). InnerVolumeSpecName "sg-core-conf-yaml". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.305594 4961 scope.go:117] "RemoveContainer" containerID="ffd086d664dc56d275e927d1d95a64e5f2c85e9ec384e7902e8b15d3a9fec9a2" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.346932 4961 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/083ba775-2cc0-4528-9712-27574bbf5df7-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.346976 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qdvz7\" (UniqueName: \"kubernetes.io/projected/083ba775-2cc0-4528-9712-27574bbf5df7-kube-api-access-qdvz7\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.346991 4961 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/083ba775-2cc0-4528-9712-27574bbf5df7-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.418032 4961 scope.go:117] "RemoveContainer" containerID="72f0c5cc6585e3ff2f634480cd51db4ec1a91ccc8e98fc69da63cbb0a669335f" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.448005 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/083ba775-2cc0-4528-9712-27574bbf5df7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "083ba775-2cc0-4528-9712-27574bbf5df7" (UID: "083ba775-2cc0-4528-9712-27574bbf5df7"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.448166 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/792b07a4-55ec-4870-9e27-3b6e4d250b67-httpd-run\") pod \"792b07a4-55ec-4870-9e27-3b6e4d250b67\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.448199 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/792b07a4-55ec-4870-9e27-3b6e4d250b67-scripts\") pod \"792b07a4-55ec-4870-9e27-3b6e4d250b67\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.448295 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/792b07a4-55ec-4870-9e27-3b6e4d250b67-public-tls-certs\") pod \"792b07a4-55ec-4870-9e27-3b6e4d250b67\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.448409 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/792b07a4-55ec-4870-9e27-3b6e4d250b67-combined-ca-bundle\") pod \"792b07a4-55ec-4870-9e27-3b6e4d250b67\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.448438 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"792b07a4-55ec-4870-9e27-3b6e4d250b67\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.448589 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/792b07a4-55ec-4870-9e27-3b6e4d250b67-logs\") pod \"792b07a4-55ec-4870-9e27-3b6e4d250b67\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.448629 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/792b07a4-55ec-4870-9e27-3b6e4d250b67-config-data\") pod \"792b07a4-55ec-4870-9e27-3b6e4d250b67\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.448689 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-84rxt\" (UniqueName: \"kubernetes.io/projected/792b07a4-55ec-4870-9e27-3b6e4d250b67-kube-api-access-84rxt\") pod \"792b07a4-55ec-4870-9e27-3b6e4d250b67\" (UID: \"792b07a4-55ec-4870-9e27-3b6e4d250b67\") " Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.449541 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/083ba775-2cc0-4528-9712-27574bbf5df7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.450013 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/792b07a4-55ec-4870-9e27-3b6e4d250b67-logs" (OuterVolumeSpecName: "logs") pod "792b07a4-55ec-4870-9e27-3b6e4d250b67" (UID: "792b07a4-55ec-4870-9e27-3b6e4d250b67"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.451548 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/792b07a4-55ec-4870-9e27-3b6e4d250b67-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "792b07a4-55ec-4870-9e27-3b6e4d250b67" (UID: "792b07a4-55ec-4870-9e27-3b6e4d250b67"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.454678 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/792b07a4-55ec-4870-9e27-3b6e4d250b67-scripts" (OuterVolumeSpecName: "scripts") pod "792b07a4-55ec-4870-9e27-3b6e4d250b67" (UID: "792b07a4-55ec-4870-9e27-3b6e4d250b67"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.460556 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/792b07a4-55ec-4870-9e27-3b6e4d250b67-kube-api-access-84rxt" (OuterVolumeSpecName: "kube-api-access-84rxt") pod "792b07a4-55ec-4870-9e27-3b6e4d250b67" (UID: "792b07a4-55ec-4870-9e27-3b6e4d250b67"). InnerVolumeSpecName "kube-api-access-84rxt". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.473221 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage07-crc" (OuterVolumeSpecName: "glance") pod "792b07a4-55ec-4870-9e27-3b6e4d250b67" (UID: "792b07a4-55ec-4870-9e27-3b6e4d250b67"). InnerVolumeSpecName "local-storage07-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.557090 4961 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" " Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.557155 4961 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/792b07a4-55ec-4870-9e27-3b6e4d250b67-logs\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.557198 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-84rxt\" (UniqueName: \"kubernetes.io/projected/792b07a4-55ec-4870-9e27-3b6e4d250b67-kube-api-access-84rxt\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.557214 4961 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/792b07a4-55ec-4870-9e27-3b6e4d250b67-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.557225 4961 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/792b07a4-55ec-4870-9e27-3b6e4d250b67-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.563558 4961 scope.go:117] "RemoveContainer" containerID="7d2e1d7a1ef48f7abf6b14ad0ca0b52ad4571421a23555acfd6586457b70174e" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.677457 4961 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage07-crc" (UniqueName: "kubernetes.io/local-volume/local-storage07-crc") on node "crc" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.689321 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/792b07a4-55ec-4870-9e27-3b6e4d250b67-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "792b07a4-55ec-4870-9e27-3b6e4d250b67" (UID: "792b07a4-55ec-4870-9e27-3b6e4d250b67"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.727002 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/083ba775-2cc0-4528-9712-27574bbf5df7-config-data" (OuterVolumeSpecName: "config-data") pod "083ba775-2cc0-4528-9712-27574bbf5df7" (UID: "083ba775-2cc0-4528-9712-27574bbf5df7"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.761355 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/083ba775-2cc0-4528-9712-27574bbf5df7-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.761400 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/792b07a4-55ec-4870-9e27-3b6e4d250b67-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.761416 4961 reconciler_common.go:293] "Volume detached for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.774368 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/792b07a4-55ec-4870-9e27-3b6e4d250b67-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "792b07a4-55ec-4870-9e27-3b6e4d250b67" (UID: "792b07a4-55ec-4870-9e27-3b6e4d250b67"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.809005 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/792b07a4-55ec-4870-9e27-3b6e4d250b67-config-data" (OuterVolumeSpecName: "config-data") pod "792b07a4-55ec-4870-9e27-3b6e4d250b67" (UID: "792b07a4-55ec-4870-9e27-3b6e4d250b67"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.867095 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/792b07a4-55ec-4870-9e27-3b6e4d250b67-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.867131 4961 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/792b07a4-55ec-4870-9e27-3b6e4d250b67-public-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.922705 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.928895 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.939949 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.998221 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:52:28 crc kubenswrapper[4961]: E1205 17:52:28.998592 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="083ba775-2cc0-4528-9712-27574bbf5df7" containerName="proxy-httpd" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.998610 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="083ba775-2cc0-4528-9712-27574bbf5df7" containerName="proxy-httpd" Dec 05 17:52:28 crc kubenswrapper[4961]: E1205 17:52:28.998625 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="792b07a4-55ec-4870-9e27-3b6e4d250b67" containerName="glance-log" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.998634 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="792b07a4-55ec-4870-9e27-3b6e4d250b67" containerName="glance-log" Dec 05 17:52:28 crc kubenswrapper[4961]: E1205 17:52:28.998647 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="083ba775-2cc0-4528-9712-27574bbf5df7" containerName="ceilometer-central-agent" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.998653 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="083ba775-2cc0-4528-9712-27574bbf5df7" containerName="ceilometer-central-agent" Dec 05 17:52:28 crc kubenswrapper[4961]: E1205 17:52:28.998664 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce20cb74-34b1-4745-92c7-2a4fdda5b0d8" containerName="glance-httpd" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.998671 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce20cb74-34b1-4745-92c7-2a4fdda5b0d8" containerName="glance-httpd" Dec 05 17:52:28 crc kubenswrapper[4961]: E1205 17:52:28.998686 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ce20cb74-34b1-4745-92c7-2a4fdda5b0d8" containerName="glance-log" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.998692 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="ce20cb74-34b1-4745-92c7-2a4fdda5b0d8" containerName="glance-log" Dec 05 17:52:28 crc kubenswrapper[4961]: E1205 17:52:28.998698 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="083ba775-2cc0-4528-9712-27574bbf5df7" containerName="sg-core" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.998704 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="083ba775-2cc0-4528-9712-27574bbf5df7" containerName="sg-core" Dec 05 17:52:28 crc kubenswrapper[4961]: E1205 17:52:28.998715 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="083ba775-2cc0-4528-9712-27574bbf5df7" containerName="ceilometer-notification-agent" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.998721 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="083ba775-2cc0-4528-9712-27574bbf5df7" containerName="ceilometer-notification-agent" Dec 05 17:52:28 crc kubenswrapper[4961]: E1205 17:52:28.998734 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="792b07a4-55ec-4870-9e27-3b6e4d250b67" containerName="glance-httpd" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.998740 
4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="792b07a4-55ec-4870-9e27-3b6e4d250b67" containerName="glance-httpd" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.998916 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="083ba775-2cc0-4528-9712-27574bbf5df7" containerName="ceilometer-notification-agent" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.998933 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="083ba775-2cc0-4528-9712-27574bbf5df7" containerName="proxy-httpd" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.998942 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="083ba775-2cc0-4528-9712-27574bbf5df7" containerName="sg-core" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.998952 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce20cb74-34b1-4745-92c7-2a4fdda5b0d8" containerName="glance-httpd" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.998961 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="083ba775-2cc0-4528-9712-27574bbf5df7" containerName="ceilometer-central-agent" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.998972 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="792b07a4-55ec-4870-9e27-3b6e4d250b67" containerName="glance-httpd" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.998981 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="ce20cb74-34b1-4745-92c7-2a4fdda5b0d8" containerName="glance-log" Dec 05 17:52:28 crc kubenswrapper[4961]: I1205 17:52:28.998990 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="792b07a4-55ec-4870-9e27-3b6e4d250b67" containerName="glance-log" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.000754 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.003786 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.004270 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.016760 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.031920 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/swift-proxy-5d77d54f6c-4pccf" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.071928 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-scripts\") pod \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.072368 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-combined-ca-bundle\") pod \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.073475 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"glance\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.073695 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-logs\") pod \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.073755 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-internal-tls-certs\") pod \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.073861 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-httpd-run\") pod \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.073897 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-config-data\") pod \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.073981 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hmx5j\" (UniqueName: \"kubernetes.io/projected/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-kube-api-access-hmx5j\") pod \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\" (UID: \"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8\") " Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.075614 4961 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-httpd-run" (OuterVolumeSpecName: "httpd-run") pod "ce20cb74-34b1-4745-92c7-2a4fdda5b0d8" (UID: "ce20cb74-34b1-4745-92c7-2a4fdda5b0d8"). InnerVolumeSpecName "httpd-run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.076162 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-logs" (OuterVolumeSpecName: "logs") pod "ce20cb74-34b1-4745-92c7-2a4fdda5b0d8" (UID: "ce20cb74-34b1-4745-92c7-2a4fdda5b0d8"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.078220 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-scripts" (OuterVolumeSpecName: "scripts") pod "ce20cb74-34b1-4745-92c7-2a4fdda5b0d8" (UID: "ce20cb74-34b1-4745-92c7-2a4fdda5b0d8"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.079860 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-kube-api-access-hmx5j" (OuterVolumeSpecName: "kube-api-access-hmx5j") pod "ce20cb74-34b1-4745-92c7-2a4fdda5b0d8" (UID: "ce20cb74-34b1-4745-92c7-2a4fdda5b0d8"). InnerVolumeSpecName "kube-api-access-hmx5j". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.092496 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage10-crc" (OuterVolumeSpecName: "glance") pod "ce20cb74-34b1-4745-92c7-2a4fdda5b0d8" (UID: "ce20cb74-34b1-4745-92c7-2a4fdda5b0d8"). InnerVolumeSpecName "local-storage10-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.143239 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ce20cb74-34b1-4745-92c7-2a4fdda5b0d8" (UID: "ce20cb74-34b1-4745-92c7-2a4fdda5b0d8"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.181569 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b0b81627-84e0-452a-b8c4-18c889d1d0b1-log-httpd\") pod \"ceilometer-0\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " pod="openstack/ceilometer-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.199636 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0b81627-84e0-452a-b8c4-18c889d1d0b1-config-data\") pod \"ceilometer-0\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " pod="openstack/ceilometer-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.204006 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cbx7k\" (UniqueName: \"kubernetes.io/projected/b0b81627-84e0-452a-b8c4-18c889d1d0b1-kube-api-access-cbx7k\") pod \"ceilometer-0\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " pod="openstack/ceilometer-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.204100 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0b81627-84e0-452a-b8c4-18c889d1d0b1-scripts\") pod \"ceilometer-0\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " pod="openstack/ceilometer-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.204179 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b0b81627-84e0-452a-b8c4-18c889d1d0b1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " pod="openstack/ceilometer-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.204398 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0b81627-84e0-452a-b8c4-18c889d1d0b1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " pod="openstack/ceilometer-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.205459 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b0b81627-84e0-452a-b8c4-18c889d1d0b1-run-httpd\") pod \"ceilometer-0\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " pod="openstack/ceilometer-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.205676 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hmx5j\" (UniqueName: \"kubernetes.io/projected/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-kube-api-access-hmx5j\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.205697 4961 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.205714 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.205747 4961 
reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" " Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.205765 4961 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-logs\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.205797 4961 reconciler_common.go:293] "Volume detached for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-httpd-run\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.243288 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-6c6c-account-create-update-hsnmn"] Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.253204 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"792b07a4-55ec-4870-9e27-3b6e4d250b67","Type":"ContainerDied","Data":"3497a998dc55834b0715a3369f9839f20b67a48110a058634ad6454f26c13b8d"} Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.253295 4961 scope.go:117] "RemoveContainer" containerID="eb34cea83c0ee346263a0b71ab34ca9f157690a05f117f19a676a5ef7ed49981" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.253440 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ce20cb74-34b1-4745-92c7-2a4fdda5b0d8" (UID: "ce20cb74-34b1-4745-92c7-2a4fdda5b0d8"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.253511 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.258607 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-f95f-account-create-update-bkjnh" event={"ID":"79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56","Type":"ContainerStarted","Data":"aec0718594d094886ec8c01475a562dee360b164db6640d3cecd0c0aa9b7419b"} Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.268092 4961 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage10-crc" (UniqueName: "kubernetes.io/local-volume/local-storage10-crc") on node "crc" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.272488 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"ce20cb74-34b1-4745-92c7-2a4fdda5b0d8","Type":"ContainerDied","Data":"039498f303d51baece9b54bac81344956ca84e7a1bd1fd79a37cc5728093db00"} Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.272605 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.282278 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-f95f-account-create-update-bkjnh"] Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.282320 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/openstackclient" event={"ID":"ce8ed9d8-89a0-4d15-9f08-d30111d16a2e","Type":"ContainerStarted","Data":"a33a66a6a088df7a3b097249e382839abde1f2afcb69486599994563fefabb87"} Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.292621 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-db-create-8zqnt"] Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.305467 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-config-data" (OuterVolumeSpecName: "config-data") pod "ce20cb74-34b1-4745-92c7-2a4fdda5b0d8" (UID: "ce20cb74-34b1-4745-92c7-2a4fdda5b0d8"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.307094 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b0b81627-84e0-452a-b8c4-18c889d1d0b1-run-httpd\") pod \"ceilometer-0\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " pod="openstack/ceilometer-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.307163 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b0b81627-84e0-452a-b8c4-18c889d1d0b1-log-httpd\") pod \"ceilometer-0\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " pod="openstack/ceilometer-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.307187 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0b81627-84e0-452a-b8c4-18c889d1d0b1-config-data\") pod \"ceilometer-0\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " pod="openstack/ceilometer-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.307253 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cbx7k\" (UniqueName: \"kubernetes.io/projected/b0b81627-84e0-452a-b8c4-18c889d1d0b1-kube-api-access-cbx7k\") pod \"ceilometer-0\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " pod="openstack/ceilometer-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.307293 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0b81627-84e0-452a-b8c4-18c889d1d0b1-scripts\") pod \"ceilometer-0\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " pod="openstack/ceilometer-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.307327 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b0b81627-84e0-452a-b8c4-18c889d1d0b1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " pod="openstack/ceilometer-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.307416 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0b81627-84e0-452a-b8c4-18c889d1d0b1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: 
\"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " pod="openstack/ceilometer-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.307513 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.307529 4961 reconciler_common.go:293] "Volume detached for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.307543 4961 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.314469 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b0b81627-84e0-452a-b8c4-18c889d1d0b1-run-httpd\") pod \"ceilometer-0\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " pod="openstack/ceilometer-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.317810 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0b81627-84e0-452a-b8c4-18c889d1d0b1-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " pod="openstack/ceilometer-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.318151 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b0b81627-84e0-452a-b8c4-18c889d1d0b1-log-httpd\") pod \"ceilometer-0\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " pod="openstack/ceilometer-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.320291 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b0b81627-84e0-452a-b8c4-18c889d1d0b1-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " pod="openstack/ceilometer-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.325075 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0b81627-84e0-452a-b8c4-18c889d1d0b1-config-data\") pod \"ceilometer-0\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " pod="openstack/ceilometer-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.325637 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0b81627-84e0-452a-b8c4-18c889d1d0b1-scripts\") pod \"ceilometer-0\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " pod="openstack/ceilometer-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.326515 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-5809-account-create-update-5n4c9"] Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.340040 4961 scope.go:117] "RemoveContainer" containerID="26653f35aae08d87656ae9a104b448a17c8dfaa8f58107e0edc9239f4a5ad9ad" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.346792 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-db-create-bmgv7"] Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.355950 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openstack/nova-cell1-db-create-7gphb"] Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.364616 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.372930 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.385235 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/openstackclient" podStartSLOduration=2.404871176 podStartE2EDuration="14.385209964s" podCreationTimestamp="2025-12-05 17:52:15 +0000 UTC" firstStartedPulling="2025-12-05 17:52:16.008868315 +0000 UTC m=+1142.070018788" lastFinishedPulling="2025-12-05 17:52:27.989207103 +0000 UTC m=+1154.050357576" observedRunningTime="2025-12-05 17:52:29.313164044 +0000 UTC m=+1155.374314517" watchObservedRunningTime="2025-12-05 17:52:29.385209964 +0000 UTC m=+1155.446360447" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.387651 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.389494 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.391246 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cbx7k\" (UniqueName: \"kubernetes.io/projected/b0b81627-84e0-452a-b8c4-18c889d1d0b1-kube-api-access-cbx7k\") pod \"ceilometer-0\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " pod="openstack/ceilometer-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.400332 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-external-config-data" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.401373 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-public-svc" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.408696 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.514845 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96\") " pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.514970 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96\") " pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.515052 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96-scripts\") pod \"glance-default-external-api-0\" (UID: \"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96\") " pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.515102 4961 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96\") " pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.515162 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96-config-data\") pod \"glance-default-external-api-0\" (UID: \"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96\") " pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.515235 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96-logs\") pod \"glance-default-external-api-0\" (UID: \"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96\") " pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.515280 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lgm2\" (UniqueName: \"kubernetes.io/projected/e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96-kube-api-access-6lgm2\") pod \"glance-default-external-api-0\" (UID: \"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96\") " pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.515335 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96\") " pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.565976 4961 scope.go:117] "RemoveContainer" containerID="140e97691d78b8e3d30753328776e5fc9b2fb76022aa38ae6a0f7b0f6f7c298c" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.617018 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96-logs\") pod \"glance-default-external-api-0\" (UID: \"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96\") " pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.617083 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lgm2\" (UniqueName: \"kubernetes.io/projected/e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96-kube-api-access-6lgm2\") pod \"glance-default-external-api-0\" (UID: \"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96\") " pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.617130 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96\") " pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.617158 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96-combined-ca-bundle\") pod 
\"glance-default-external-api-0\" (UID: \"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96\") " pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.617202 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96\") " pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.617250 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96-scripts\") pod \"glance-default-external-api-0\" (UID: \"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96\") " pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.617281 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96\") " pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.617321 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96-config-data\") pod \"glance-default-external-api-0\" (UID: \"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96\") " pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.617698 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96-logs\") pod \"glance-default-external-api-0\" (UID: \"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96\") " pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.618077 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96-httpd-run\") pod \"glance-default-external-api-0\" (UID: \"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96\") " pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.618734 4961 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96\") device mount path \"/mnt/openstack/pv07\"" pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.629793 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96-config-data\") pod \"glance-default-external-api-0\" (UID: \"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96\") " pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.634612 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.636624 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96-scripts\") pod \"glance-default-external-api-0\" (UID: \"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96\") " pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.646623 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96-public-tls-certs\") pod \"glance-default-external-api-0\" (UID: \"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96\") " pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.650881 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lgm2\" (UniqueName: \"kubernetes.io/projected/e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96-kube-api-access-6lgm2\") pod \"glance-default-external-api-0\" (UID: \"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96\") " pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.651753 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96-combined-ca-bundle\") pod \"glance-default-external-api-0\" (UID: \"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96\") " pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.769883 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage07-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage07-crc\") pod \"glance-default-external-api-0\" (UID: \"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96\") " pod="openstack/glance-default-external-api-0" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.894654 4961 scope.go:117] "RemoveContainer" containerID="1236fafbb9e2e5cc64b3b0cf733f5e0bcc978e52c4a17a565c5198715611d214" Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.961888 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:52:29 crc kubenswrapper[4961]: I1205 17:52:29.991852 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.003622 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.006092 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.009794 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-glance-default-internal-svc" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.010108 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"glance-default-internal-config-data" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.018333 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.050994 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-external-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.178104 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sd5dd\" (UniqueName: \"kubernetes.io/projected/3ac5ebd0-a089-428c-a698-cbd1f6c50c57-kube-api-access-sd5dd\") pod \"glance-default-internal-api-0\" (UID: \"3ac5ebd0-a089-428c-a698-cbd1f6c50c57\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.178503 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"3ac5ebd0-a089-428c-a698-cbd1f6c50c57\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.178540 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ac5ebd0-a089-428c-a698-cbd1f6c50c57-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3ac5ebd0-a089-428c-a698-cbd1f6c50c57\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.178575 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ac5ebd0-a089-428c-a698-cbd1f6c50c57-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3ac5ebd0-a089-428c-a698-cbd1f6c50c57\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.178641 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3ac5ebd0-a089-428c-a698-cbd1f6c50c57-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3ac5ebd0-a089-428c-a698-cbd1f6c50c57\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.178679 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ac5ebd0-a089-428c-a698-cbd1f6c50c57-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"3ac5ebd0-a089-428c-a698-cbd1f6c50c57\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.178702 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ac5ebd0-a089-428c-a698-cbd1f6c50c57-logs\") pod \"glance-default-internal-api-0\" (UID: \"3ac5ebd0-a089-428c-a698-cbd1f6c50c57\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.194231 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ac5ebd0-a089-428c-a698-cbd1f6c50c57-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3ac5ebd0-a089-428c-a698-cbd1f6c50c57\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.270991 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.299060 4961 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-sd5dd\" (UniqueName: \"kubernetes.io/projected/3ac5ebd0-a089-428c-a698-cbd1f6c50c57-kube-api-access-sd5dd\") pod \"glance-default-internal-api-0\" (UID: \"3ac5ebd0-a089-428c-a698-cbd1f6c50c57\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.299183 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"3ac5ebd0-a089-428c-a698-cbd1f6c50c57\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.299225 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ac5ebd0-a089-428c-a698-cbd1f6c50c57-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3ac5ebd0-a089-428c-a698-cbd1f6c50c57\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.299252 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ac5ebd0-a089-428c-a698-cbd1f6c50c57-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3ac5ebd0-a089-428c-a698-cbd1f6c50c57\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.299293 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3ac5ebd0-a089-428c-a698-cbd1f6c50c57-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3ac5ebd0-a089-428c-a698-cbd1f6c50c57\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.299329 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ac5ebd0-a089-428c-a698-cbd1f6c50c57-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"3ac5ebd0-a089-428c-a698-cbd1f6c50c57\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.299351 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/3ac5ebd0-a089-428c-a698-cbd1f6c50c57-logs\") pod \"glance-default-internal-api-0\" (UID: \"3ac5ebd0-a089-428c-a698-cbd1f6c50c57\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.299420 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ac5ebd0-a089-428c-a698-cbd1f6c50c57-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3ac5ebd0-a089-428c-a698-cbd1f6c50c57\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.301193 4961 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"3ac5ebd0-a089-428c-a698-cbd1f6c50c57\") device mount path \"/mnt/openstack/pv10\"" pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.303975 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: 
\"kubernetes.io/empty-dir/3ac5ebd0-a089-428c-a698-cbd1f6c50c57-logs\") pod \"glance-default-internal-api-0\" (UID: \"3ac5ebd0-a089-428c-a698-cbd1f6c50c57\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.304339 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"httpd-run\" (UniqueName: \"kubernetes.io/empty-dir/3ac5ebd0-a089-428c-a698-cbd1f6c50c57-httpd-run\") pod \"glance-default-internal-api-0\" (UID: \"3ac5ebd0-a089-428c-a698-cbd1f6c50c57\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.309684 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-5809-account-create-update-5n4c9" event={"ID":"9ed81d6a-5172-4362-b7da-02f4552cb45b","Type":"ContainerStarted","Data":"0456527d2214ee7e3eaad127b86ae5f87367f4f6bb847d6a6059955e9f952015"} Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.314125 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-6c6c-account-create-update-hsnmn" event={"ID":"276d90d2-7b34-45be-a492-89dc67929102","Type":"ContainerStarted","Data":"4b8f995fae745c0435941c2bcb6125cfe1428c628f7286e959a633d7e9ba1ff8"} Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.314165 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-6c6c-account-create-update-hsnmn" event={"ID":"276d90d2-7b34-45be-a492-89dc67929102","Type":"ContainerStarted","Data":"4cae16a05db0d89eaccbdb12c28d13eaeeca3e17a0e22405a85c6ae86adb12c1"} Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.316128 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-bmgv7" event={"ID":"e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc","Type":"ContainerStarted","Data":"dec16c7288e2e83ecb03772573d03ac8062ed48a3b8ebca6c5c8dd2fcf18414c"} Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.317150 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-7gphb" event={"ID":"763814d3-5adb-4523-8baa-1ca7f7ecc86b","Type":"ContainerStarted","Data":"28834f50c64e87ed9b452448524ad677312836d1dc4c37c8c88c3ba53524ec31"} Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.319220 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/3ac5ebd0-a089-428c-a698-cbd1f6c50c57-combined-ca-bundle\") pod \"glance-default-internal-api-0\" (UID: \"3ac5ebd0-a089-428c-a698-cbd1f6c50c57\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.320225 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3ac5ebd0-a089-428c-a698-cbd1f6c50c57-scripts\") pod \"glance-default-internal-api-0\" (UID: \"3ac5ebd0-a089-428c-a698-cbd1f6c50c57\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.320480 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/3ac5ebd0-a089-428c-a698-cbd1f6c50c57-internal-tls-certs\") pod \"glance-default-internal-api-0\" (UID: \"3ac5ebd0-a089-428c-a698-cbd1f6c50c57\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.321398 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sd5dd\" (UniqueName: 
\"kubernetes.io/projected/3ac5ebd0-a089-428c-a698-cbd1f6c50c57-kube-api-access-sd5dd\") pod \"glance-default-internal-api-0\" (UID: \"3ac5ebd0-a089-428c-a698-cbd1f6c50c57\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.326758 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3ac5ebd0-a089-428c-a698-cbd1f6c50c57-config-data\") pod \"glance-default-internal-api-0\" (UID: \"3ac5ebd0-a089-428c-a698-cbd1f6c50c57\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.337235 4961 generic.go:334] "Generic (PLEG): container finished" podID="79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56" containerID="0c159959b35f5fb1ae069ea0f318fda3d75cf71e46117fb47768e842a99f6a0a" exitCode=0 Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.337355 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-f95f-account-create-update-bkjnh" event={"ID":"79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56","Type":"ContainerDied","Data":"0c159959b35f5fb1ae069ea0f318fda3d75cf71e46117fb47768e842a99f6a0a"} Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.339726 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-6c6c-account-create-update-hsnmn" podStartSLOduration=7.3397065 podStartE2EDuration="7.3397065s" podCreationTimestamp="2025-12-05 17:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:52:30.332668677 +0000 UTC m=+1156.393819160" watchObservedRunningTime="2025-12-05 17:52:30.3397065 +0000 UTC m=+1156.400856973" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.341848 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-8zqnt" event={"ID":"f1728b96-e1ba-4e27-a120-6f6fd3e85437","Type":"ContainerStarted","Data":"3f827d79ecc877f5e583594a3ff5b569710ad0831e0d2042f0aa2c1ddb39ca7e"} Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.490834 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage10-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage10-crc\") pod \"glance-default-internal-api-0\" (UID: \"3ac5ebd0-a089-428c-a698-cbd1f6c50c57\") " pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.629293 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/glance-default-internal-api-0" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.833623 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-external-api-0"] Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.884909 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="083ba775-2cc0-4528-9712-27574bbf5df7" path="/var/lib/kubelet/pods/083ba775-2cc0-4528-9712-27574bbf5df7/volumes" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.886102 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="792b07a4-55ec-4870-9e27-3b6e4d250b67" path="/var/lib/kubelet/pods/792b07a4-55ec-4870-9e27-3b6e4d250b67/volumes" Dec 05 17:52:30 crc kubenswrapper[4961]: I1205 17:52:30.887365 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ce20cb74-34b1-4745-92c7-2a4fdda5b0d8" path="/var/lib/kubelet/pods/ce20cb74-34b1-4745-92c7-2a4fdda5b0d8/volumes" Dec 05 17:52:31 crc kubenswrapper[4961]: I1205 17:52:31.256577 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/glance-default-internal-api-0"] Dec 05 17:52:31 crc kubenswrapper[4961]: W1205 17:52:31.274913 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3ac5ebd0_a089_428c_a698_cbd1f6c50c57.slice/crio-a81ca875d13d3cbf2c2b98245d6b32465d419aef464fefdddfc652cf0af72e4b WatchSource:0}: Error finding container a81ca875d13d3cbf2c2b98245d6b32465d419aef464fefdddfc652cf0af72e4b: Status 404 returned error can't find the container with id a81ca875d13d3cbf2c2b98245d6b32465d419aef464fefdddfc652cf0af72e4b Dec 05 17:52:31 crc kubenswrapper[4961]: E1205 17:52:31.346324 4961 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod792b07a4_55ec_4870_9e27_3b6e4d250b67.slice/crio-conmon-26653f35aae08d87656ae9a104b448a17c8dfaa8f58107e0edc9239f4a5ad9ad.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod792b07a4_55ec_4870_9e27_3b6e4d250b67.slice/crio-eb34cea83c0ee346263a0b71ab34ca9f157690a05f117f19a676a5ef7ed49981.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podce20cb74_34b1_4745_92c7_2a4fdda5b0d8.slice/crio-conmon-140e97691d78b8e3d30753328776e5fc9b2fb76022aa38ae6a0f7b0f6f7c298c.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podce20cb74_34b1_4745_92c7_2a4fdda5b0d8.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod083ba775_2cc0_4528_9712_27574bbf5df7.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod792b07a4_55ec_4870_9e27_3b6e4d250b67.slice/crio-conmon-eb34cea83c0ee346263a0b71ab34ca9f157690a05f117f19a676a5ef7ed49981.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod792b07a4_55ec_4870_9e27_3b6e4d250b67.slice\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod01eef206_68f4_4923_8253_2a130ba0dca3.slice/crio-b962d0a3c530d2aeff022acebc2fd4c86c94ea227e97d35c58d1ba851a3abe96.scope\": RecentStats: 
unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podce20cb74_34b1_4745_92c7_2a4fdda5b0d8.slice/crio-140e97691d78b8e3d30753328776e5fc9b2fb76022aa38ae6a0f7b0f6f7c298c.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod792b07a4_55ec_4870_9e27_3b6e4d250b67.slice/crio-26653f35aae08d87656ae9a104b448a17c8dfaa8f58107e0edc9239f4a5ad9ad.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podce20cb74_34b1_4745_92c7_2a4fdda5b0d8.slice/crio-039498f303d51baece9b54bac81344956ca84e7a1bd1fd79a37cc5728093db00\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod792b07a4_55ec_4870_9e27_3b6e4d250b67.slice/crio-3497a998dc55834b0715a3369f9839f20b67a48110a058634ad6454f26c13b8d\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod083ba775_2cc0_4528_9712_27574bbf5df7.slice/crio-838b5bc176ccb3ffbd0846ee4a5847ea01b289b44eb1475a202173ddb76d7329\": RecentStats: unable to find data in memory cache]" Dec 05 17:52:31 crc kubenswrapper[4961]: I1205 17:52:31.360421 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96","Type":"ContainerStarted","Data":"0771d5c3756fa90879c42fee7f0f33e76e89eaf28027f9704e42e6df23e7a400"} Dec 05 17:52:31 crc kubenswrapper[4961]: I1205 17:52:31.386457 4961 generic.go:334] "Generic (PLEG): container finished" podID="01eef206-68f4-4923-8253-2a130ba0dca3" containerID="b962d0a3c530d2aeff022acebc2fd4c86c94ea227e97d35c58d1ba851a3abe96" exitCode=137 Dec 05 17:52:31 crc kubenswrapper[4961]: I1205 17:52:31.386831 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-65bb59d746-cqlw9" event={"ID":"01eef206-68f4-4923-8253-2a130ba0dca3","Type":"ContainerDied","Data":"b962d0a3c530d2aeff022acebc2fd4c86c94ea227e97d35c58d1ba851a3abe96"} Dec 05 17:52:31 crc kubenswrapper[4961]: I1205 17:52:31.405301 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-bmgv7" event={"ID":"e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc","Type":"ContainerStarted","Data":"a962669bb87dce720ca35fd1752e5a631befba4e2a10b94bb1a1b2a2cadef9f9"} Dec 05 17:52:31 crc kubenswrapper[4961]: I1205 17:52:31.452879 4961 generic.go:334] "Generic (PLEG): container finished" podID="763814d3-5adb-4523-8baa-1ca7f7ecc86b" containerID="b5d5b6efaaef42c2d6310136042a05e2583aedfa1b3777bee0ec9b8b3b1e74e7" exitCode=0 Dec 05 17:52:31 crc kubenswrapper[4961]: I1205 17:52:31.453042 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-7gphb" event={"ID":"763814d3-5adb-4523-8baa-1ca7f7ecc86b","Type":"ContainerDied","Data":"b5d5b6efaaef42c2d6310136042a05e2583aedfa1b3777bee0ec9b8b3b1e74e7"} Dec 05 17:52:31 crc kubenswrapper[4961]: I1205 17:52:31.466967 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-db-create-bmgv7" podStartSLOduration=8.466926528 podStartE2EDuration="8.466926528s" podCreationTimestamp="2025-12-05 17:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:52:31.461539906 +0000 UTC m=+1157.522690379" watchObservedRunningTime="2025-12-05 17:52:31.466926528 +0000 UTC m=+1157.528077001" 
Dec 05 17:52:31 crc kubenswrapper[4961]: I1205 17:52:31.471543 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-5809-account-create-update-5n4c9" event={"ID":"9ed81d6a-5172-4362-b7da-02f4552cb45b","Type":"ContainerStarted","Data":"308dd2f22a6c6777855cc9d754902ac4a7ce3a59b8ac8c53e85361eca2c09457"} Dec 05 17:52:31 crc kubenswrapper[4961]: I1205 17:52:31.489335 4961 generic.go:334] "Generic (PLEG): container finished" podID="276d90d2-7b34-45be-a492-89dc67929102" containerID="4b8f995fae745c0435941c2bcb6125cfe1428c628f7286e959a633d7e9ba1ff8" exitCode=0 Dec 05 17:52:31 crc kubenswrapper[4961]: I1205 17:52:31.489473 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-6c6c-account-create-update-hsnmn" event={"ID":"276d90d2-7b34-45be-a492-89dc67929102","Type":"ContainerDied","Data":"4b8f995fae745c0435941c2bcb6125cfe1428c628f7286e959a633d7e9ba1ff8"} Dec 05 17:52:31 crc kubenswrapper[4961]: I1205 17:52:31.502575 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-8zqnt" event={"ID":"f1728b96-e1ba-4e27-a120-6f6fd3e85437","Type":"ContainerStarted","Data":"8c1c0fe9d0c89482eee963cca2e897add7e5c0fc6031477e2f0cf7eeb00daab9"} Dec 05 17:52:31 crc kubenswrapper[4961]: I1205 17:52:31.518910 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3ac5ebd0-a089-428c-a698-cbd1f6c50c57","Type":"ContainerStarted","Data":"a81ca875d13d3cbf2c2b98245d6b32465d419aef464fefdddfc652cf0af72e4b"} Dec 05 17:52:31 crc kubenswrapper[4961]: I1205 17:52:31.536204 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-5809-account-create-update-5n4c9" podStartSLOduration=7.536171878 podStartE2EDuration="7.536171878s" podCreationTimestamp="2025-12-05 17:52:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:52:31.512429566 +0000 UTC m=+1157.573580039" watchObservedRunningTime="2025-12-05 17:52:31.536171878 +0000 UTC m=+1157.597322351" Dec 05 17:52:31 crc kubenswrapper[4961]: I1205 17:52:31.545308 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b0b81627-84e0-452a-b8c4-18c889d1d0b1","Type":"ContainerStarted","Data":"ee1b1da1b017744d6a8b469ae9ec9c23f318865e5ac42e07807bbb35c2745d29"} Dec 05 17:52:31 crc kubenswrapper[4961]: I1205 17:52:31.569623 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-db-create-8zqnt" podStartSLOduration=8.569589589 podStartE2EDuration="8.569589589s" podCreationTimestamp="2025-12-05 17:52:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:52:31.534963139 +0000 UTC m=+1157.596113722" watchObservedRunningTime="2025-12-05 17:52:31.569589589 +0000 UTC m=+1157.630740072" Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.038815 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.040187 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-f95f-account-create-update-bkjnh" Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.182926 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lb54t\" (UniqueName: \"kubernetes.io/projected/01eef206-68f4-4923-8253-2a130ba0dca3-kube-api-access-lb54t\") pod \"01eef206-68f4-4923-8253-2a130ba0dca3\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.183399 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/01eef206-68f4-4923-8253-2a130ba0dca3-config-data\") pod \"01eef206-68f4-4923-8253-2a130ba0dca3\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.183435 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56-operator-scripts\") pod \"79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56\" (UID: \"79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56\") " Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.183478 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01eef206-68f4-4923-8253-2a130ba0dca3-logs\") pod \"01eef206-68f4-4923-8253-2a130ba0dca3\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.183542 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01eef206-68f4-4923-8253-2a130ba0dca3-combined-ca-bundle\") pod \"01eef206-68f4-4923-8253-2a130ba0dca3\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.183625 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/01eef206-68f4-4923-8253-2a130ba0dca3-horizon-tls-certs\") pod \"01eef206-68f4-4923-8253-2a130ba0dca3\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.183665 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9c5vl\" (UniqueName: \"kubernetes.io/projected/79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56-kube-api-access-9c5vl\") pod \"79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56\" (UID: \"79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56\") " Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.183707 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/01eef206-68f4-4923-8253-2a130ba0dca3-horizon-secret-key\") pod \"01eef206-68f4-4923-8253-2a130ba0dca3\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.183866 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/01eef206-68f4-4923-8253-2a130ba0dca3-scripts\") pod \"01eef206-68f4-4923-8253-2a130ba0dca3\" (UID: \"01eef206-68f4-4923-8253-2a130ba0dca3\") " Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.184699 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod 
"79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56" (UID: "79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.188056 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01eef206-68f4-4923-8253-2a130ba0dca3-logs" (OuterVolumeSpecName: "logs") pod "01eef206-68f4-4923-8253-2a130ba0dca3" (UID: "01eef206-68f4-4923-8253-2a130ba0dca3"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.193592 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56-kube-api-access-9c5vl" (OuterVolumeSpecName: "kube-api-access-9c5vl") pod "79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56" (UID: "79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56"). InnerVolumeSpecName "kube-api-access-9c5vl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.194245 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01eef206-68f4-4923-8253-2a130ba0dca3-kube-api-access-lb54t" (OuterVolumeSpecName: "kube-api-access-lb54t") pod "01eef206-68f4-4923-8253-2a130ba0dca3" (UID: "01eef206-68f4-4923-8253-2a130ba0dca3"). InnerVolumeSpecName "kube-api-access-lb54t". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.197930 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01eef206-68f4-4923-8253-2a130ba0dca3-horizon-secret-key" (OuterVolumeSpecName: "horizon-secret-key") pod "01eef206-68f4-4923-8253-2a130ba0dca3" (UID: "01eef206-68f4-4923-8253-2a130ba0dca3"). InnerVolumeSpecName "horizon-secret-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.241186 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01eef206-68f4-4923-8253-2a130ba0dca3-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "01eef206-68f4-4923-8253-2a130ba0dca3" (UID: "01eef206-68f4-4923-8253-2a130ba0dca3"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.241290 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01eef206-68f4-4923-8253-2a130ba0dca3-scripts" (OuterVolumeSpecName: "scripts") pod "01eef206-68f4-4923-8253-2a130ba0dca3" (UID: "01eef206-68f4-4923-8253-2a130ba0dca3"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.267027 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01eef206-68f4-4923-8253-2a130ba0dca3-config-data" (OuterVolumeSpecName: "config-data") pod "01eef206-68f4-4923-8253-2a130ba0dca3" (UID: "01eef206-68f4-4923-8253-2a130ba0dca3"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.285985 4961 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/01eef206-68f4-4923-8253-2a130ba0dca3-logs\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.286027 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/01eef206-68f4-4923-8253-2a130ba0dca3-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.286043 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9c5vl\" (UniqueName: \"kubernetes.io/projected/79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56-kube-api-access-9c5vl\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.286054 4961 reconciler_common.go:293] "Volume detached for volume \"horizon-secret-key\" (UniqueName: \"kubernetes.io/secret/01eef206-68f4-4923-8253-2a130ba0dca3-horizon-secret-key\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.286065 4961 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/configmap/01eef206-68f4-4923-8253-2a130ba0dca3-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.286077 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lb54t\" (UniqueName: \"kubernetes.io/projected/01eef206-68f4-4923-8253-2a130ba0dca3-kube-api-access-lb54t\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.286087 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/01eef206-68f4-4923-8253-2a130ba0dca3-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.286098 4961 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.341958 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01eef206-68f4-4923-8253-2a130ba0dca3-horizon-tls-certs" (OuterVolumeSpecName: "horizon-tls-certs") pod "01eef206-68f4-4923-8253-2a130ba0dca3" (UID: "01eef206-68f4-4923-8253-2a130ba0dca3"). InnerVolumeSpecName "horizon-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.389923 4961 reconciler_common.go:293] "Volume detached for volume \"horizon-tls-certs\" (UniqueName: \"kubernetes.io/secret/01eef206-68f4-4923-8253-2a130ba0dca3-horizon-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.558923 4961 generic.go:334] "Generic (PLEG): container finished" podID="9ed81d6a-5172-4362-b7da-02f4552cb45b" containerID="308dd2f22a6c6777855cc9d754902ac4a7ce3a59b8ac8c53e85361eca2c09457" exitCode=0 Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.559109 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-5809-account-create-update-5n4c9" event={"ID":"9ed81d6a-5172-4362-b7da-02f4552cb45b","Type":"ContainerDied","Data":"308dd2f22a6c6777855cc9d754902ac4a7ce3a59b8ac8c53e85361eca2c09457"} Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.562070 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3ac5ebd0-a089-428c-a698-cbd1f6c50c57","Type":"ContainerStarted","Data":"2d4c4564a77c44cba9ed5c9fd6cbfdb9c4e5f43e53a13b618b9ae9df1c7c07d0"} Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.569682 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b0b81627-84e0-452a-b8c4-18c889d1d0b1","Type":"ContainerStarted","Data":"0160b394be450316b81f2461df981814b2daa80fa0fb431bbd1a9fa91e436779"} Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.569755 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b0b81627-84e0-452a-b8c4-18c889d1d0b1","Type":"ContainerStarted","Data":"5a6b85fb4a13b5a38f97442a61c8695010a2d629e1c82476f1f85acda588fd85"} Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.590459 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96","Type":"ContainerStarted","Data":"6198c057317e4ea7de66539165c5dc4b7902a3e1ce927c134b45a4ea3d2b6aa5"} Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.597720 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/horizon-65bb59d746-cqlw9" event={"ID":"01eef206-68f4-4923-8253-2a130ba0dca3","Type":"ContainerDied","Data":"774812ddff6ecc48ef5bfe499a4b3c36832c79aa054864d429285d7f1156f23d"} Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.597764 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/horizon-65bb59d746-cqlw9" Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.597840 4961 scope.go:117] "RemoveContainer" containerID="01aff3bd688c5b14a84d39236485cd65f7bf7cd0d578accde815230603894791" Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.609278 4961 generic.go:334] "Generic (PLEG): container finished" podID="e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc" containerID="a962669bb87dce720ca35fd1752e5a631befba4e2a10b94bb1a1b2a2cadef9f9" exitCode=0 Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.609612 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-bmgv7" event={"ID":"e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc","Type":"ContainerDied","Data":"a962669bb87dce720ca35fd1752e5a631befba4e2a10b94bb1a1b2a2cadef9f9"} Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.616467 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-f95f-account-create-update-bkjnh" event={"ID":"79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56","Type":"ContainerDied","Data":"aec0718594d094886ec8c01475a562dee360b164db6640d3cecd0c0aa9b7419b"} Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.616568 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aec0718594d094886ec8c01475a562dee360b164db6640d3cecd0c0aa9b7419b" Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.616686 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-f95f-account-create-update-bkjnh" Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.625624 4961 generic.go:334] "Generic (PLEG): container finished" podID="f1728b96-e1ba-4e27-a120-6f6fd3e85437" containerID="8c1c0fe9d0c89482eee963cca2e897add7e5c0fc6031477e2f0cf7eeb00daab9" exitCode=0 Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.625856 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-8zqnt" event={"ID":"f1728b96-e1ba-4e27-a120-6f6fd3e85437","Type":"ContainerDied","Data":"8c1c0fe9d0c89482eee963cca2e897add7e5c0fc6031477e2f0cf7eeb00daab9"} Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.752927 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/horizon-65bb59d746-cqlw9"] Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.760483 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/horizon-65bb59d746-cqlw9"] Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.888578 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01eef206-68f4-4923-8253-2a130ba0dca3" path="/var/lib/kubelet/pods/01eef206-68f4-4923-8253-2a130ba0dca3/volumes" Dec 05 17:52:32 crc kubenswrapper[4961]: I1205 17:52:32.928943 4961 scope.go:117] "RemoveContainer" containerID="b962d0a3c530d2aeff022acebc2fd4c86c94ea227e97d35c58d1ba851a3abe96" Dec 05 17:52:33 crc kubenswrapper[4961]: I1205 17:52:33.184762 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-db-create-7gphb" Dec 05 17:52:33 crc kubenswrapper[4961]: I1205 17:52:33.289859 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-6c6c-account-create-update-hsnmn" Dec 05 17:52:33 crc kubenswrapper[4961]: I1205 17:52:33.324392 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/763814d3-5adb-4523-8baa-1ca7f7ecc86b-operator-scripts\") pod \"763814d3-5adb-4523-8baa-1ca7f7ecc86b\" (UID: \"763814d3-5adb-4523-8baa-1ca7f7ecc86b\") " Dec 05 17:52:33 crc kubenswrapper[4961]: I1205 17:52:33.324514 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m96ps\" (UniqueName: \"kubernetes.io/projected/763814d3-5adb-4523-8baa-1ca7f7ecc86b-kube-api-access-m96ps\") pod \"763814d3-5adb-4523-8baa-1ca7f7ecc86b\" (UID: \"763814d3-5adb-4523-8baa-1ca7f7ecc86b\") " Dec 05 17:52:33 crc kubenswrapper[4961]: I1205 17:52:33.326061 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/763814d3-5adb-4523-8baa-1ca7f7ecc86b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "763814d3-5adb-4523-8baa-1ca7f7ecc86b" (UID: "763814d3-5adb-4523-8baa-1ca7f7ecc86b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:52:33 crc kubenswrapper[4961]: I1205 17:52:33.331937 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/763814d3-5adb-4523-8baa-1ca7f7ecc86b-kube-api-access-m96ps" (OuterVolumeSpecName: "kube-api-access-m96ps") pod "763814d3-5adb-4523-8baa-1ca7f7ecc86b" (UID: "763814d3-5adb-4523-8baa-1ca7f7ecc86b"). InnerVolumeSpecName "kube-api-access-m96ps". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:52:33 crc kubenswrapper[4961]: I1205 17:52:33.427153 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/276d90d2-7b34-45be-a492-89dc67929102-operator-scripts\") pod \"276d90d2-7b34-45be-a492-89dc67929102\" (UID: \"276d90d2-7b34-45be-a492-89dc67929102\") " Dec 05 17:52:33 crc kubenswrapper[4961]: I1205 17:52:33.427241 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jm7nk\" (UniqueName: \"kubernetes.io/projected/276d90d2-7b34-45be-a492-89dc67929102-kube-api-access-jm7nk\") pod \"276d90d2-7b34-45be-a492-89dc67929102\" (UID: \"276d90d2-7b34-45be-a492-89dc67929102\") " Dec 05 17:52:33 crc kubenswrapper[4961]: I1205 17:52:33.428163 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m96ps\" (UniqueName: \"kubernetes.io/projected/763814d3-5adb-4523-8baa-1ca7f7ecc86b-kube-api-access-m96ps\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:33 crc kubenswrapper[4961]: I1205 17:52:33.428189 4961 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/763814d3-5adb-4523-8baa-1ca7f7ecc86b-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:33 crc kubenswrapper[4961]: I1205 17:52:33.428616 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/276d90d2-7b34-45be-a492-89dc67929102-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "276d90d2-7b34-45be-a492-89dc67929102" (UID: "276d90d2-7b34-45be-a492-89dc67929102"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:52:33 crc kubenswrapper[4961]: I1205 17:52:33.435184 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/276d90d2-7b34-45be-a492-89dc67929102-kube-api-access-jm7nk" (OuterVolumeSpecName: "kube-api-access-jm7nk") pod "276d90d2-7b34-45be-a492-89dc67929102" (UID: "276d90d2-7b34-45be-a492-89dc67929102"). InnerVolumeSpecName "kube-api-access-jm7nk". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:52:33 crc kubenswrapper[4961]: I1205 17:52:33.533542 4961 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/276d90d2-7b34-45be-a492-89dc67929102-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:33 crc kubenswrapper[4961]: I1205 17:52:33.533796 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jm7nk\" (UniqueName: \"kubernetes.io/projected/276d90d2-7b34-45be-a492-89dc67929102-kube-api-access-jm7nk\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:33 crc kubenswrapper[4961]: I1205 17:52:33.675264 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-6c6c-account-create-update-hsnmn" event={"ID":"276d90d2-7b34-45be-a492-89dc67929102","Type":"ContainerDied","Data":"4cae16a05db0d89eaccbdb12c28d13eaeeca3e17a0e22405a85c6ae86adb12c1"} Dec 05 17:52:33 crc kubenswrapper[4961]: I1205 17:52:33.675309 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4cae16a05db0d89eaccbdb12c28d13eaeeca3e17a0e22405a85c6ae86adb12c1" Dec 05 17:52:33 crc kubenswrapper[4961]: I1205 17:52:33.675398 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-6c6c-account-create-update-hsnmn" Dec 05 17:52:33 crc kubenswrapper[4961]: I1205 17:52:33.691328 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b0b81627-84e0-452a-b8c4-18c889d1d0b1","Type":"ContainerStarted","Data":"8a6c68746812f14255774c6bee1842c34558602f1bace5efdc3129190a1d833c"} Dec 05 17:52:33 crc kubenswrapper[4961]: I1205 17:52:33.696522 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-external-api-0" event={"ID":"e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96","Type":"ContainerStarted","Data":"9e18f78b18dc25bb158c616dd77be6ee3ecd3ce41d49b4e7005d806bfb427a41"} Dec 05 17:52:33 crc kubenswrapper[4961]: I1205 17:52:33.701307 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-db-create-7gphb" Dec 05 17:52:33 crc kubenswrapper[4961]: I1205 17:52:33.701313 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-db-create-7gphb" event={"ID":"763814d3-5adb-4523-8baa-1ca7f7ecc86b","Type":"ContainerDied","Data":"28834f50c64e87ed9b452448524ad677312836d1dc4c37c8c88c3ba53524ec31"} Dec 05 17:52:33 crc kubenswrapper[4961]: I1205 17:52:33.701387 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="28834f50c64e87ed9b452448524ad677312836d1dc4c37c8c88c3ba53524ec31" Dec 05 17:52:33 crc kubenswrapper[4961]: I1205 17:52:33.703542 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/glance-default-internal-api-0" event={"ID":"3ac5ebd0-a089-428c-a698-cbd1f6c50c57","Type":"ContainerStarted","Data":"e26689f43988d70f670e1d14806871b131276d8e55dbf94028f5ab41c763c502"} Dec 05 17:52:33 crc kubenswrapper[4961]: I1205 17:52:33.739542 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-external-api-0" podStartSLOduration=4.73951523 podStartE2EDuration="4.73951523s" podCreationTimestamp="2025-12-05 17:52:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:52:33.7317454 +0000 UTC m=+1159.792895883" watchObservedRunningTime="2025-12-05 17:52:33.73951523 +0000 UTC m=+1159.800665703" Dec 05 17:52:33 crc kubenswrapper[4961]: I1205 17:52:33.768141 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/glance-default-internal-api-0" podStartSLOduration=4.768121914 podStartE2EDuration="4.768121914s" podCreationTimestamp="2025-12-05 17:52:29 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:52:33.760071046 +0000 UTC m=+1159.821221539" watchObservedRunningTime="2025-12-05 17:52:33.768121914 +0000 UTC m=+1159.829272387" Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.301655 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-5809-account-create-update-5n4c9" Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.308168 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-bmgv7" Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.316199 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-db-create-8zqnt" Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.469506 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tktqf\" (UniqueName: \"kubernetes.io/projected/e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc-kube-api-access-tktqf\") pod \"e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc\" (UID: \"e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc\") " Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.469825 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n74l7\" (UniqueName: \"kubernetes.io/projected/f1728b96-e1ba-4e27-a120-6f6fd3e85437-kube-api-access-n74l7\") pod \"f1728b96-e1ba-4e27-a120-6f6fd3e85437\" (UID: \"f1728b96-e1ba-4e27-a120-6f6fd3e85437\") " Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.469929 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ed81d6a-5172-4362-b7da-02f4552cb45b-operator-scripts\") pod \"9ed81d6a-5172-4362-b7da-02f4552cb45b\" (UID: \"9ed81d6a-5172-4362-b7da-02f4552cb45b\") " Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.470019 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc-operator-scripts\") pod \"e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc\" (UID: \"e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc\") " Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.470041 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-86lfc\" (UniqueName: \"kubernetes.io/projected/9ed81d6a-5172-4362-b7da-02f4552cb45b-kube-api-access-86lfc\") pod \"9ed81d6a-5172-4362-b7da-02f4552cb45b\" (UID: \"9ed81d6a-5172-4362-b7da-02f4552cb45b\") " Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.470088 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f1728b96-e1ba-4e27-a120-6f6fd3e85437-operator-scripts\") pod \"f1728b96-e1ba-4e27-a120-6f6fd3e85437\" (UID: \"f1728b96-e1ba-4e27-a120-6f6fd3e85437\") " Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.470949 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f1728b96-e1ba-4e27-a120-6f6fd3e85437-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f1728b96-e1ba-4e27-a120-6f6fd3e85437" (UID: "f1728b96-e1ba-4e27-a120-6f6fd3e85437"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.473650 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc-kube-api-access-tktqf" (OuterVolumeSpecName: "kube-api-access-tktqf") pod "e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc" (UID: "e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc"). InnerVolumeSpecName "kube-api-access-tktqf". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.474205 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9ed81d6a-5172-4362-b7da-02f4552cb45b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "9ed81d6a-5172-4362-b7da-02f4552cb45b" (UID: "9ed81d6a-5172-4362-b7da-02f4552cb45b"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.474230 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f1728b96-e1ba-4e27-a120-6f6fd3e85437-kube-api-access-n74l7" (OuterVolumeSpecName: "kube-api-access-n74l7") pod "f1728b96-e1ba-4e27-a120-6f6fd3e85437" (UID: "f1728b96-e1ba-4e27-a120-6f6fd3e85437"). InnerVolumeSpecName "kube-api-access-n74l7". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.474222 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc" (UID: "e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.477944 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9ed81d6a-5172-4362-b7da-02f4552cb45b-kube-api-access-86lfc" (OuterVolumeSpecName: "kube-api-access-86lfc") pod "9ed81d6a-5172-4362-b7da-02f4552cb45b" (UID: "9ed81d6a-5172-4362-b7da-02f4552cb45b"). InnerVolumeSpecName "kube-api-access-86lfc". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.572231 4961 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.572287 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-86lfc\" (UniqueName: \"kubernetes.io/projected/9ed81d6a-5172-4362-b7da-02f4552cb45b-kube-api-access-86lfc\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.572303 4961 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f1728b96-e1ba-4e27-a120-6f6fd3e85437-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.572314 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tktqf\" (UniqueName: \"kubernetes.io/projected/e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc-kube-api-access-tktqf\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.572325 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n74l7\" (UniqueName: \"kubernetes.io/projected/f1728b96-e1ba-4e27-a120-6f6fd3e85437-kube-api-access-n74l7\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.572338 4961 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/9ed81d6a-5172-4362-b7da-02f4552cb45b-operator-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.719097 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-5809-account-create-update-5n4c9" event={"ID":"9ed81d6a-5172-4362-b7da-02f4552cb45b","Type":"ContainerDied","Data":"0456527d2214ee7e3eaad127b86ae5f87367f4f6bb847d6a6059955e9f952015"} Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.719144 4961 pod_container_deletor.go:80] "Container not found in pod's 
containers" containerID="0456527d2214ee7e3eaad127b86ae5f87367f4f6bb847d6a6059955e9f952015" Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.719139 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-5809-account-create-update-5n4c9" Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.724868 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-db-create-bmgv7" Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.724860 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-db-create-bmgv7" event={"ID":"e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc","Type":"ContainerDied","Data":"dec16c7288e2e83ecb03772573d03ac8062ed48a3b8ebca6c5c8dd2fcf18414c"} Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.725038 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dec16c7288e2e83ecb03772573d03ac8062ed48a3b8ebca6c5c8dd2fcf18414c" Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.730381 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-db-create-8zqnt" event={"ID":"f1728b96-e1ba-4e27-a120-6f6fd3e85437","Type":"ContainerDied","Data":"3f827d79ecc877f5e583594a3ff5b569710ad0831e0d2042f0aa2c1ddb39ca7e"} Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.730433 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f827d79ecc877f5e583594a3ff5b569710ad0831e0d2042f0aa2c1ddb39ca7e" Dec 05 17:52:34 crc kubenswrapper[4961]: I1205 17:52:34.731764 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-db-create-8zqnt" Dec 05 17:52:35 crc kubenswrapper[4961]: I1205 17:52:35.742936 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b0b81627-84e0-452a-b8c4-18c889d1d0b1","Type":"ContainerStarted","Data":"8769ab631ab572416a0b72099bf7786b1c9a518d374e382c123091823f2efe1d"} Dec 05 17:52:35 crc kubenswrapper[4961]: I1205 17:52:35.743267 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 17:52:35 crc kubenswrapper[4961]: I1205 17:52:35.790486 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=3.66078626 podStartE2EDuration="7.790449659s" podCreationTimestamp="2025-12-05 17:52:28 +0000 UTC" firstStartedPulling="2025-12-05 17:52:30.33118975 +0000 UTC m=+1156.392340223" lastFinishedPulling="2025-12-05 17:52:34.460853149 +0000 UTC m=+1160.522003622" observedRunningTime="2025-12-05 17:52:35.774129687 +0000 UTC m=+1161.835280200" watchObservedRunningTime="2025-12-05 17:52:35.790449659 +0000 UTC m=+1161.851600172" Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.361890 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6nzdc"] Dec 05 17:52:39 crc kubenswrapper[4961]: E1205 17:52:39.362666 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc" containerName="mariadb-database-create" Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.362679 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc" containerName="mariadb-database-create" Dec 05 17:52:39 crc kubenswrapper[4961]: E1205 17:52:39.362687 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01eef206-68f4-4923-8253-2a130ba0dca3" 
containerName="horizon" Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.362693 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="01eef206-68f4-4923-8253-2a130ba0dca3" containerName="horizon" Dec 05 17:52:39 crc kubenswrapper[4961]: E1205 17:52:39.362705 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="763814d3-5adb-4523-8baa-1ca7f7ecc86b" containerName="mariadb-database-create" Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.362712 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="763814d3-5adb-4523-8baa-1ca7f7ecc86b" containerName="mariadb-database-create" Dec 05 17:52:39 crc kubenswrapper[4961]: E1205 17:52:39.362723 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="276d90d2-7b34-45be-a492-89dc67929102" containerName="mariadb-account-create-update" Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.362729 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="276d90d2-7b34-45be-a492-89dc67929102" containerName="mariadb-account-create-update" Dec 05 17:52:39 crc kubenswrapper[4961]: E1205 17:52:39.362739 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9ed81d6a-5172-4362-b7da-02f4552cb45b" containerName="mariadb-account-create-update" Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.362745 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="9ed81d6a-5172-4362-b7da-02f4552cb45b" containerName="mariadb-account-create-update" Dec 05 17:52:39 crc kubenswrapper[4961]: E1205 17:52:39.362764 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01eef206-68f4-4923-8253-2a130ba0dca3" containerName="horizon-log" Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.362769 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="01eef206-68f4-4923-8253-2a130ba0dca3" containerName="horizon-log" Dec 05 17:52:39 crc kubenswrapper[4961]: E1205 17:52:39.362784 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f1728b96-e1ba-4e27-a120-6f6fd3e85437" containerName="mariadb-database-create" Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.362791 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="f1728b96-e1ba-4e27-a120-6f6fd3e85437" containerName="mariadb-database-create" Dec 05 17:52:39 crc kubenswrapper[4961]: E1205 17:52:39.362816 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56" containerName="mariadb-account-create-update" Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.362821 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56" containerName="mariadb-account-create-update" Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.362988 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="f1728b96-e1ba-4e27-a120-6f6fd3e85437" containerName="mariadb-database-create" Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.363002 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="9ed81d6a-5172-4362-b7da-02f4552cb45b" containerName="mariadb-account-create-update" Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.363013 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56" containerName="mariadb-account-create-update" Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.363027 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="763814d3-5adb-4523-8baa-1ca7f7ecc86b" containerName="mariadb-database-create" 
Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.363038 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc" containerName="mariadb-database-create"
Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.363051 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="01eef206-68f4-4923-8253-2a130ba0dca3" containerName="horizon"
Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.363060 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="276d90d2-7b34-45be-a492-89dc67929102" containerName="mariadb-account-create-update"
Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.363070 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="01eef206-68f4-4923-8253-2a130ba0dca3" containerName="horizon-log"
Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.363621 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-6nzdc"
Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.365604 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-scripts"
Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.365995 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data"
Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.368864 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-v5jfr"
Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.376140 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6nzdc"]
Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.484322 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48ded27c-f907-4876-899b-236f2b1aa906-config-data\") pod \"nova-cell0-conductor-db-sync-6nzdc\" (UID: \"48ded27c-f907-4876-899b-236f2b1aa906\") " pod="openstack/nova-cell0-conductor-db-sync-6nzdc"
Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.484469 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48ded27c-f907-4876-899b-236f2b1aa906-scripts\") pod \"nova-cell0-conductor-db-sync-6nzdc\" (UID: \"48ded27c-f907-4876-899b-236f2b1aa906\") " pod="openstack/nova-cell0-conductor-db-sync-6nzdc"
Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.484508 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jh58q\" (UniqueName: \"kubernetes.io/projected/48ded27c-f907-4876-899b-236f2b1aa906-kube-api-access-jh58q\") pod \"nova-cell0-conductor-db-sync-6nzdc\" (UID: \"48ded27c-f907-4876-899b-236f2b1aa906\") " pod="openstack/nova-cell0-conductor-db-sync-6nzdc"
Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.484640 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48ded27c-f907-4876-899b-236f2b1aa906-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-6nzdc\" (UID: \"48ded27c-f907-4876-899b-236f2b1aa906\") " pod="openstack/nova-cell0-conductor-db-sync-6nzdc"
Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.586140 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48ded27c-f907-4876-899b-236f2b1aa906-scripts\") pod \"nova-cell0-conductor-db-sync-6nzdc\" (UID: \"48ded27c-f907-4876-899b-236f2b1aa906\") " pod="openstack/nova-cell0-conductor-db-sync-6nzdc"
Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.586231 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jh58q\" (UniqueName: \"kubernetes.io/projected/48ded27c-f907-4876-899b-236f2b1aa906-kube-api-access-jh58q\") pod \"nova-cell0-conductor-db-sync-6nzdc\" (UID: \"48ded27c-f907-4876-899b-236f2b1aa906\") " pod="openstack/nova-cell0-conductor-db-sync-6nzdc"
Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.586360 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48ded27c-f907-4876-899b-236f2b1aa906-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-6nzdc\" (UID: \"48ded27c-f907-4876-899b-236f2b1aa906\") " pod="openstack/nova-cell0-conductor-db-sync-6nzdc"
Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.586404 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48ded27c-f907-4876-899b-236f2b1aa906-config-data\") pod \"nova-cell0-conductor-db-sync-6nzdc\" (UID: \"48ded27c-f907-4876-899b-236f2b1aa906\") " pod="openstack/nova-cell0-conductor-db-sync-6nzdc"
Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.592975 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48ded27c-f907-4876-899b-236f2b1aa906-scripts\") pod \"nova-cell0-conductor-db-sync-6nzdc\" (UID: \"48ded27c-f907-4876-899b-236f2b1aa906\") " pod="openstack/nova-cell0-conductor-db-sync-6nzdc"
Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.593678 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48ded27c-f907-4876-899b-236f2b1aa906-config-data\") pod \"nova-cell0-conductor-db-sync-6nzdc\" (UID: \"48ded27c-f907-4876-899b-236f2b1aa906\") " pod="openstack/nova-cell0-conductor-db-sync-6nzdc"
Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.596263 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48ded27c-f907-4876-899b-236f2b1aa906-combined-ca-bundle\") pod \"nova-cell0-conductor-db-sync-6nzdc\" (UID: \"48ded27c-f907-4876-899b-236f2b1aa906\") " pod="openstack/nova-cell0-conductor-db-sync-6nzdc"
Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.602977 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jh58q\" (UniqueName: \"kubernetes.io/projected/48ded27c-f907-4876-899b-236f2b1aa906-kube-api-access-jh58q\") pod \"nova-cell0-conductor-db-sync-6nzdc\" (UID: \"48ded27c-f907-4876-899b-236f2b1aa906\") " pod="openstack/nova-cell0-conductor-db-sync-6nzdc"
Dec 05 17:52:39 crc kubenswrapper[4961]: I1205 17:52:39.687616 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-6nzdc"
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-6nzdc" Dec 05 17:52:40 crc kubenswrapper[4961]: I1205 17:52:40.051818 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 17:52:40 crc kubenswrapper[4961]: I1205 17:52:40.052106 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-external-api-0" Dec 05 17:52:40 crc kubenswrapper[4961]: I1205 17:52:40.084712 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 17:52:40 crc kubenswrapper[4961]: I1205 17:52:40.100367 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-external-api-0" Dec 05 17:52:40 crc kubenswrapper[4961]: I1205 17:52:40.171430 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6nzdc"] Dec 05 17:52:40 crc kubenswrapper[4961]: I1205 17:52:40.416740 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:52:40 crc kubenswrapper[4961]: I1205 17:52:40.417120 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b0b81627-84e0-452a-b8c4-18c889d1d0b1" containerName="ceilometer-central-agent" containerID="cri-o://0160b394be450316b81f2461df981814b2daa80fa0fb431bbd1a9fa91e436779" gracePeriod=30 Dec 05 17:52:40 crc kubenswrapper[4961]: I1205 17:52:40.417204 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b0b81627-84e0-452a-b8c4-18c889d1d0b1" containerName="proxy-httpd" containerID="cri-o://8769ab631ab572416a0b72099bf7786b1c9a518d374e382c123091823f2efe1d" gracePeriod=30 Dec 05 17:52:40 crc kubenswrapper[4961]: I1205 17:52:40.417231 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b0b81627-84e0-452a-b8c4-18c889d1d0b1" containerName="sg-core" containerID="cri-o://8a6c68746812f14255774c6bee1842c34558602f1bace5efdc3129190a1d833c" gracePeriod=30 Dec 05 17:52:40 crc kubenswrapper[4961]: I1205 17:52:40.417214 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="b0b81627-84e0-452a-b8c4-18c889d1d0b1" containerName="ceilometer-notification-agent" containerID="cri-o://5a6b85fb4a13b5a38f97442a61c8695010a2d629e1c82476f1f85acda588fd85" gracePeriod=30 Dec 05 17:52:40 crc kubenswrapper[4961]: I1205 17:52:40.630103 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 17:52:40 crc kubenswrapper[4961]: I1205 17:52:40.631379 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/glance-default-internal-api-0" Dec 05 17:52:40 crc kubenswrapper[4961]: I1205 17:52:40.676074 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 17:52:40 crc kubenswrapper[4961]: I1205 17:52:40.683101 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/glance-default-internal-api-0" Dec 05 17:52:40 crc kubenswrapper[4961]: I1205 17:52:40.788017 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-6nzdc" 
event={"ID":"48ded27c-f907-4876-899b-236f2b1aa906","Type":"ContainerStarted","Data":"48f7983ec34a960e3dc525cbc23b4ba464f26579f60291b94435362119bc8d3e"} Dec 05 17:52:40 crc kubenswrapper[4961]: I1205 17:52:40.791321 4961 generic.go:334] "Generic (PLEG): container finished" podID="b0b81627-84e0-452a-b8c4-18c889d1d0b1" containerID="8769ab631ab572416a0b72099bf7786b1c9a518d374e382c123091823f2efe1d" exitCode=0 Dec 05 17:52:40 crc kubenswrapper[4961]: I1205 17:52:40.791360 4961 generic.go:334] "Generic (PLEG): container finished" podID="b0b81627-84e0-452a-b8c4-18c889d1d0b1" containerID="8a6c68746812f14255774c6bee1842c34558602f1bace5efdc3129190a1d833c" exitCode=2 Dec 05 17:52:40 crc kubenswrapper[4961]: I1205 17:52:40.791372 4961 generic.go:334] "Generic (PLEG): container finished" podID="b0b81627-84e0-452a-b8c4-18c889d1d0b1" containerID="5a6b85fb4a13b5a38f97442a61c8695010a2d629e1c82476f1f85acda588fd85" exitCode=0 Dec 05 17:52:40 crc kubenswrapper[4961]: I1205 17:52:40.791409 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b0b81627-84e0-452a-b8c4-18c889d1d0b1","Type":"ContainerDied","Data":"8769ab631ab572416a0b72099bf7786b1c9a518d374e382c123091823f2efe1d"} Dec 05 17:52:40 crc kubenswrapper[4961]: I1205 17:52:40.791461 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b0b81627-84e0-452a-b8c4-18c889d1d0b1","Type":"ContainerDied","Data":"8a6c68746812f14255774c6bee1842c34558602f1bace5efdc3129190a1d833c"} Dec 05 17:52:40 crc kubenswrapper[4961]: I1205 17:52:40.791477 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b0b81627-84e0-452a-b8c4-18c889d1d0b1","Type":"ContainerDied","Data":"5a6b85fb4a13b5a38f97442a61c8695010a2d629e1c82476f1f85acda588fd85"} Dec 05 17:52:40 crc kubenswrapper[4961]: I1205 17:52:40.792279 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 17:52:40 crc kubenswrapper[4961]: I1205 17:52:40.792309 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-external-api-0" Dec 05 17:52:40 crc kubenswrapper[4961]: I1205 17:52:40.792322 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 17:52:40 crc kubenswrapper[4961]: I1205 17:52:40.792333 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/glance-default-internal-api-0" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.320077 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.425449 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cbx7k\" (UniqueName: \"kubernetes.io/projected/b0b81627-84e0-452a-b8c4-18c889d1d0b1-kube-api-access-cbx7k\") pod \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.425541 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b0b81627-84e0-452a-b8c4-18c889d1d0b1-sg-core-conf-yaml\") pod \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.425697 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0b81627-84e0-452a-b8c4-18c889d1d0b1-combined-ca-bundle\") pod \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.425731 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b0b81627-84e0-452a-b8c4-18c889d1d0b1-log-httpd\") pod \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.425753 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0b81627-84e0-452a-b8c4-18c889d1d0b1-scripts\") pod \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.425815 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0b81627-84e0-452a-b8c4-18c889d1d0b1-config-data\") pod \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.425843 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b0b81627-84e0-452a-b8c4-18c889d1d0b1-run-httpd\") pod \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\" (UID: \"b0b81627-84e0-452a-b8c4-18c889d1d0b1\") " Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.427481 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0b81627-84e0-452a-b8c4-18c889d1d0b1-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "b0b81627-84e0-452a-b8c4-18c889d1d0b1" (UID: "b0b81627-84e0-452a-b8c4-18c889d1d0b1"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.428031 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b0b81627-84e0-452a-b8c4-18c889d1d0b1-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "b0b81627-84e0-452a-b8c4-18c889d1d0b1" (UID: "b0b81627-84e0-452a-b8c4-18c889d1d0b1"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.435035 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b0b81627-84e0-452a-b8c4-18c889d1d0b1-kube-api-access-cbx7k" (OuterVolumeSpecName: "kube-api-access-cbx7k") pod "b0b81627-84e0-452a-b8c4-18c889d1d0b1" (UID: "b0b81627-84e0-452a-b8c4-18c889d1d0b1"). InnerVolumeSpecName "kube-api-access-cbx7k". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.441945 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0b81627-84e0-452a-b8c4-18c889d1d0b1-scripts" (OuterVolumeSpecName: "scripts") pod "b0b81627-84e0-452a-b8c4-18c889d1d0b1" (UID: "b0b81627-84e0-452a-b8c4-18c889d1d0b1"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.475030 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0b81627-84e0-452a-b8c4-18c889d1d0b1-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "b0b81627-84e0-452a-b8c4-18c889d1d0b1" (UID: "b0b81627-84e0-452a-b8c4-18c889d1d0b1"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.527545 4961 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b0b81627-84e0-452a-b8c4-18c889d1d0b1-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.527583 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cbx7k\" (UniqueName: \"kubernetes.io/projected/b0b81627-84e0-452a-b8c4-18c889d1d0b1-kube-api-access-cbx7k\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.527598 4961 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/b0b81627-84e0-452a-b8c4-18c889d1d0b1-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.527608 4961 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/b0b81627-84e0-452a-b8c4-18c889d1d0b1-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.527620 4961 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b0b81627-84e0-452a-b8c4-18c889d1d0b1-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.533730 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0b81627-84e0-452a-b8c4-18c889d1d0b1-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "b0b81627-84e0-452a-b8c4-18c889d1d0b1" (UID: "b0b81627-84e0-452a-b8c4-18c889d1d0b1"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.551814 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b0b81627-84e0-452a-b8c4-18c889d1d0b1-config-data" (OuterVolumeSpecName: "config-data") pod "b0b81627-84e0-452a-b8c4-18c889d1d0b1" (UID: "b0b81627-84e0-452a-b8c4-18c889d1d0b1"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.632506 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b0b81627-84e0-452a-b8c4-18c889d1d0b1-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.632539 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b0b81627-84e0-452a-b8c4-18c889d1d0b1-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.806481 4961 generic.go:334] "Generic (PLEG): container finished" podID="b0b81627-84e0-452a-b8c4-18c889d1d0b1" containerID="0160b394be450316b81f2461df981814b2daa80fa0fb431bbd1a9fa91e436779" exitCode=0 Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.807008 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b0b81627-84e0-452a-b8c4-18c889d1d0b1","Type":"ContainerDied","Data":"0160b394be450316b81f2461df981814b2daa80fa0fb431bbd1a9fa91e436779"} Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.807081 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"b0b81627-84e0-452a-b8c4-18c889d1d0b1","Type":"ContainerDied","Data":"ee1b1da1b017744d6a8b469ae9ec9c23f318865e5ac42e07807bbb35c2745d29"} Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.807106 4961 scope.go:117] "RemoveContainer" containerID="8769ab631ab572416a0b72099bf7786b1c9a518d374e382c123091823f2efe1d" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.807100 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.867021 4961 scope.go:117] "RemoveContainer" containerID="8a6c68746812f14255774c6bee1842c34558602f1bace5efdc3129190a1d833c" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.867235 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.901783 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.917882 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:52:41 crc kubenswrapper[4961]: E1205 17:52:41.918331 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0b81627-84e0-452a-b8c4-18c889d1d0b1" containerName="ceilometer-central-agent" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.918349 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0b81627-84e0-452a-b8c4-18c889d1d0b1" containerName="ceilometer-central-agent" Dec 05 17:52:41 crc kubenswrapper[4961]: E1205 17:52:41.918362 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0b81627-84e0-452a-b8c4-18c889d1d0b1" containerName="proxy-httpd" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.918370 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0b81627-84e0-452a-b8c4-18c889d1d0b1" containerName="proxy-httpd" Dec 05 17:52:41 crc kubenswrapper[4961]: E1205 17:52:41.918390 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0b81627-84e0-452a-b8c4-18c889d1d0b1" containerName="ceilometer-notification-agent" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.918397 4961 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="b0b81627-84e0-452a-b8c4-18c889d1d0b1" containerName="ceilometer-notification-agent" Dec 05 17:52:41 crc kubenswrapper[4961]: E1205 17:52:41.918413 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b0b81627-84e0-452a-b8c4-18c889d1d0b1" containerName="sg-core" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.918419 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="b0b81627-84e0-452a-b8c4-18c889d1d0b1" containerName="sg-core" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.918584 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0b81627-84e0-452a-b8c4-18c889d1d0b1" containerName="proxy-httpd" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.918597 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0b81627-84e0-452a-b8c4-18c889d1d0b1" containerName="sg-core" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.918606 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0b81627-84e0-452a-b8c4-18c889d1d0b1" containerName="ceilometer-notification-agent" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.918617 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="b0b81627-84e0-452a-b8c4-18c889d1d0b1" containerName="ceilometer-central-agent" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.920294 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.924244 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.924275 4961 scope.go:117] "RemoveContainer" containerID="5a6b85fb4a13b5a38f97442a61c8695010a2d629e1c82476f1f85acda588fd85" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.925497 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.932966 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.954858 4961 scope.go:117] "RemoveContainer" containerID="0160b394be450316b81f2461df981814b2daa80fa0fb431bbd1a9fa91e436779" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.976928 4961 scope.go:117] "RemoveContainer" containerID="8769ab631ab572416a0b72099bf7786b1c9a518d374e382c123091823f2efe1d" Dec 05 17:52:41 crc kubenswrapper[4961]: E1205 17:52:41.977327 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8769ab631ab572416a0b72099bf7786b1c9a518d374e382c123091823f2efe1d\": container with ID starting with 8769ab631ab572416a0b72099bf7786b1c9a518d374e382c123091823f2efe1d not found: ID does not exist" containerID="8769ab631ab572416a0b72099bf7786b1c9a518d374e382c123091823f2efe1d" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.977359 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8769ab631ab572416a0b72099bf7786b1c9a518d374e382c123091823f2efe1d"} err="failed to get container status \"8769ab631ab572416a0b72099bf7786b1c9a518d374e382c123091823f2efe1d\": rpc error: code = NotFound desc = could not find container \"8769ab631ab572416a0b72099bf7786b1c9a518d374e382c123091823f2efe1d\": container with ID starting with 8769ab631ab572416a0b72099bf7786b1c9a518d374e382c123091823f2efe1d not found: ID does not exist" Dec 05 17:52:41 
crc kubenswrapper[4961]: I1205 17:52:41.977379 4961 scope.go:117] "RemoveContainer" containerID="8a6c68746812f14255774c6bee1842c34558602f1bace5efdc3129190a1d833c" Dec 05 17:52:41 crc kubenswrapper[4961]: E1205 17:52:41.977587 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8a6c68746812f14255774c6bee1842c34558602f1bace5efdc3129190a1d833c\": container with ID starting with 8a6c68746812f14255774c6bee1842c34558602f1bace5efdc3129190a1d833c not found: ID does not exist" containerID="8a6c68746812f14255774c6bee1842c34558602f1bace5efdc3129190a1d833c" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.977610 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8a6c68746812f14255774c6bee1842c34558602f1bace5efdc3129190a1d833c"} err="failed to get container status \"8a6c68746812f14255774c6bee1842c34558602f1bace5efdc3129190a1d833c\": rpc error: code = NotFound desc = could not find container \"8a6c68746812f14255774c6bee1842c34558602f1bace5efdc3129190a1d833c\": container with ID starting with 8a6c68746812f14255774c6bee1842c34558602f1bace5efdc3129190a1d833c not found: ID does not exist" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.977622 4961 scope.go:117] "RemoveContainer" containerID="5a6b85fb4a13b5a38f97442a61c8695010a2d629e1c82476f1f85acda588fd85" Dec 05 17:52:41 crc kubenswrapper[4961]: E1205 17:52:41.977794 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a6b85fb4a13b5a38f97442a61c8695010a2d629e1c82476f1f85acda588fd85\": container with ID starting with 5a6b85fb4a13b5a38f97442a61c8695010a2d629e1c82476f1f85acda588fd85 not found: ID does not exist" containerID="5a6b85fb4a13b5a38f97442a61c8695010a2d629e1c82476f1f85acda588fd85" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.977812 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a6b85fb4a13b5a38f97442a61c8695010a2d629e1c82476f1f85acda588fd85"} err="failed to get container status \"5a6b85fb4a13b5a38f97442a61c8695010a2d629e1c82476f1f85acda588fd85\": rpc error: code = NotFound desc = could not find container \"5a6b85fb4a13b5a38f97442a61c8695010a2d629e1c82476f1f85acda588fd85\": container with ID starting with 5a6b85fb4a13b5a38f97442a61c8695010a2d629e1c82476f1f85acda588fd85 not found: ID does not exist" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.977822 4961 scope.go:117] "RemoveContainer" containerID="0160b394be450316b81f2461df981814b2daa80fa0fb431bbd1a9fa91e436779" Dec 05 17:52:41 crc kubenswrapper[4961]: E1205 17:52:41.977969 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0160b394be450316b81f2461df981814b2daa80fa0fb431bbd1a9fa91e436779\": container with ID starting with 0160b394be450316b81f2461df981814b2daa80fa0fb431bbd1a9fa91e436779 not found: ID does not exist" containerID="0160b394be450316b81f2461df981814b2daa80fa0fb431bbd1a9fa91e436779" Dec 05 17:52:41 crc kubenswrapper[4961]: I1205 17:52:41.977982 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0160b394be450316b81f2461df981814b2daa80fa0fb431bbd1a9fa91e436779"} err="failed to get container status \"0160b394be450316b81f2461df981814b2daa80fa0fb431bbd1a9fa91e436779\": rpc error: code = NotFound desc = could not find container 
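Editor's note: every RemoveContainer above that races with CRI-O's own cleanup ends in a NotFound from the runtime; the kubelet logs the error and moves on, so these are benign double-deletes rather than real failures. A small sketch that isolates them from other DeleteContainer errors when scanning a log like this one:

```python
# Separate benign "already gone" deletions (gRPC NotFound from the runtime)
# from any other DeleteContainer failure that would merit investigation.
def split_delete_errors(lines):
    benign, suspicious = [], []
    for ln in lines:
        if "DeleteContainer returned error" not in ln:
            continue
        (benign if "code = NotFound" in ln else suspicious).append(ln)
    return benign, suspicious
```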
\"0160b394be450316b81f2461df981814b2daa80fa0fb431bbd1a9fa91e436779\": container with ID starting with 0160b394be450316b81f2461df981814b2daa80fa0fb431bbd1a9fa91e436779 not found: ID does not exist" Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.041069 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fc252b7-4db4-4deb-9340-387064220193-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " pod="openstack/ceilometer-0" Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.041205 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wltpr\" (UniqueName: \"kubernetes.io/projected/5fc252b7-4db4-4deb-9340-387064220193-kube-api-access-wltpr\") pod \"ceilometer-0\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " pod="openstack/ceilometer-0" Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.041253 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5fc252b7-4db4-4deb-9340-387064220193-run-httpd\") pod \"ceilometer-0\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " pod="openstack/ceilometer-0" Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.041274 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5fc252b7-4db4-4deb-9340-387064220193-log-httpd\") pod \"ceilometer-0\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " pod="openstack/ceilometer-0" Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.041317 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5fc252b7-4db4-4deb-9340-387064220193-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " pod="openstack/ceilometer-0" Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.041376 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fc252b7-4db4-4deb-9340-387064220193-config-data\") pod \"ceilometer-0\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " pod="openstack/ceilometer-0" Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.041611 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fc252b7-4db4-4deb-9340-387064220193-scripts\") pod \"ceilometer-0\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " pod="openstack/ceilometer-0" Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.142927 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5fc252b7-4db4-4deb-9340-387064220193-run-httpd\") pod \"ceilometer-0\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " pod="openstack/ceilometer-0" Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.142971 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5fc252b7-4db4-4deb-9340-387064220193-log-httpd\") pod \"ceilometer-0\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " pod="openstack/ceilometer-0" Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 
17:52:42.143000 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5fc252b7-4db4-4deb-9340-387064220193-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " pod="openstack/ceilometer-0" Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.143048 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fc252b7-4db4-4deb-9340-387064220193-config-data\") pod \"ceilometer-0\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " pod="openstack/ceilometer-0" Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.143130 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fc252b7-4db4-4deb-9340-387064220193-scripts\") pod \"ceilometer-0\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " pod="openstack/ceilometer-0" Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.143157 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fc252b7-4db4-4deb-9340-387064220193-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " pod="openstack/ceilometer-0" Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.143225 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wltpr\" (UniqueName: \"kubernetes.io/projected/5fc252b7-4db4-4deb-9340-387064220193-kube-api-access-wltpr\") pod \"ceilometer-0\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " pod="openstack/ceilometer-0" Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.143448 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5fc252b7-4db4-4deb-9340-387064220193-log-httpd\") pod \"ceilometer-0\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " pod="openstack/ceilometer-0" Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.144050 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5fc252b7-4db4-4deb-9340-387064220193-run-httpd\") pod \"ceilometer-0\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " pod="openstack/ceilometer-0" Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.148498 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fc252b7-4db4-4deb-9340-387064220193-scripts\") pod \"ceilometer-0\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " pod="openstack/ceilometer-0" Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.148899 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fc252b7-4db4-4deb-9340-387064220193-config-data\") pod \"ceilometer-0\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " pod="openstack/ceilometer-0" Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.150853 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fc252b7-4db4-4deb-9340-387064220193-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " pod="openstack/ceilometer-0" Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.157825 4961 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5fc252b7-4db4-4deb-9340-387064220193-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " pod="openstack/ceilometer-0" Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.165745 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wltpr\" (UniqueName: \"kubernetes.io/projected/5fc252b7-4db4-4deb-9340-387064220193-kube-api-access-wltpr\") pod \"ceilometer-0\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " pod="openstack/ceilometer-0" Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.246203 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.792901 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.817416 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5fc252b7-4db4-4deb-9340-387064220193","Type":"ContainerStarted","Data":"5e03fedf3a86894f541490124a0a7a3b7eb3c3c9b07eb5ec685443374a50878d"} Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.820961 4961 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.820995 4961 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.882067 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b0b81627-84e0-452a-b8c4-18c889d1d0b1" path="/var/lib/kubelet/pods/b0b81627-84e0-452a-b8c4-18c889d1d0b1/volumes" Dec 05 17:52:42 crc kubenswrapper[4961]: I1205 17:52:42.883082 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:52:43 crc kubenswrapper[4961]: I1205 17:52:43.059625 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 17:52:43 crc kubenswrapper[4961]: I1205 17:52:43.059801 4961 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Dec 05 17:52:43 crc kubenswrapper[4961]: I1205 17:52:43.324919 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 17:52:43 crc kubenswrapper[4961]: I1205 17:52:43.335181 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-external-api-0" Dec 05 17:52:43 crc kubenswrapper[4961]: I1205 17:52:43.359229 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/glance-default-internal-api-0" Dec 05 17:52:44 crc kubenswrapper[4961]: I1205 17:52:44.853751 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5fc252b7-4db4-4deb-9340-387064220193","Type":"ContainerStarted","Data":"5f93bf96c62c798017171bb621e50daedce3421575d015b275a73e13023326ea"} Dec 05 17:52:45 crc kubenswrapper[4961]: I1205 17:52:45.866197 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5fc252b7-4db4-4deb-9340-387064220193","Type":"ContainerStarted","Data":"62d24f251817bdd0dcecd12f4c6a164e2203dd2650c944b441bb5911ecb89729"} Dec 05 17:52:54 crc kubenswrapper[4961]: I1205 17:52:54.952988 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-6nzdc" 
event={"ID":"48ded27c-f907-4876-899b-236f2b1aa906","Type":"ContainerStarted","Data":"352294156f014497b9fe3a11132400266f168e51a879bebf89d68908807fa945"} Dec 05 17:52:54 crc kubenswrapper[4961]: I1205 17:52:54.955633 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5fc252b7-4db4-4deb-9340-387064220193","Type":"ContainerStarted","Data":"7244ee1f9dc8e048a969fa37ec6d010b86df1682c7fbf36021b969fdd159a22b"} Dec 05 17:52:54 crc kubenswrapper[4961]: I1205 17:52:54.976558 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-db-sync-6nzdc" podStartSLOduration=1.924383949 podStartE2EDuration="15.976536208s" podCreationTimestamp="2025-12-05 17:52:39 +0000 UTC" firstStartedPulling="2025-12-05 17:52:40.177781615 +0000 UTC m=+1166.238932088" lastFinishedPulling="2025-12-05 17:52:54.229933874 +0000 UTC m=+1180.291084347" observedRunningTime="2025-12-05 17:52:54.969814171 +0000 UTC m=+1181.030964654" watchObservedRunningTime="2025-12-05 17:52:54.976536208 +0000 UTC m=+1181.037686681" Dec 05 17:52:57 crc kubenswrapper[4961]: I1205 17:52:57.245673 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:52:57 crc kubenswrapper[4961]: I1205 17:52:57.246041 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:52:57 crc kubenswrapper[4961]: I1205 17:52:57.980724 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5fc252b7-4db4-4deb-9340-387064220193","Type":"ContainerStarted","Data":"2f3178e96a90b82d41d58f7d069bd95649dde5a8a48af8e84a76fd9e4282116b"} Dec 05 17:52:57 crc kubenswrapper[4961]: I1205 17:52:57.980901 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5fc252b7-4db4-4deb-9340-387064220193" containerName="ceilometer-central-agent" containerID="cri-o://5f93bf96c62c798017171bb621e50daedce3421575d015b275a73e13023326ea" gracePeriod=30 Dec 05 17:52:57 crc kubenswrapper[4961]: I1205 17:52:57.981032 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5fc252b7-4db4-4deb-9340-387064220193" containerName="sg-core" containerID="cri-o://7244ee1f9dc8e048a969fa37ec6d010b86df1682c7fbf36021b969fdd159a22b" gracePeriod=30 Dec 05 17:52:57 crc kubenswrapper[4961]: I1205 17:52:57.981058 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5fc252b7-4db4-4deb-9340-387064220193" containerName="ceilometer-notification-agent" containerID="cri-o://62d24f251817bdd0dcecd12f4c6a164e2203dd2650c944b441bb5911ecb89729" gracePeriod=30 Dec 05 17:52:57 crc kubenswrapper[4961]: I1205 17:52:57.981102 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="5fc252b7-4db4-4deb-9340-387064220193" containerName="proxy-httpd" containerID="cri-o://2f3178e96a90b82d41d58f7d069bd95649dde5a8a48af8e84a76fd9e4282116b" gracePeriod=30 Dec 05 17:52:57 crc 
kubenswrapper[4961]: I1205 17:52:57.981157 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 17:52:58 crc kubenswrapper[4961]: I1205 17:52:58.002281 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=4.337538723 podStartE2EDuration="17.002264346s" podCreationTimestamp="2025-12-05 17:52:41 +0000 UTC" firstStartedPulling="2025-12-05 17:52:42.78570086 +0000 UTC m=+1168.846851333" lastFinishedPulling="2025-12-05 17:52:55.450426493 +0000 UTC m=+1181.511576956" observedRunningTime="2025-12-05 17:52:58.000333409 +0000 UTC m=+1184.061483892" watchObservedRunningTime="2025-12-05 17:52:58.002264346 +0000 UTC m=+1184.063414819" Dec 05 17:52:58 crc kubenswrapper[4961]: I1205 17:52:58.992613 4961 generic.go:334] "Generic (PLEG): container finished" podID="5fc252b7-4db4-4deb-9340-387064220193" containerID="2f3178e96a90b82d41d58f7d069bd95649dde5a8a48af8e84a76fd9e4282116b" exitCode=0 Dec 05 17:52:58 crc kubenswrapper[4961]: I1205 17:52:58.992966 4961 generic.go:334] "Generic (PLEG): container finished" podID="5fc252b7-4db4-4deb-9340-387064220193" containerID="7244ee1f9dc8e048a969fa37ec6d010b86df1682c7fbf36021b969fdd159a22b" exitCode=2 Dec 05 17:52:58 crc kubenswrapper[4961]: I1205 17:52:58.992977 4961 generic.go:334] "Generic (PLEG): container finished" podID="5fc252b7-4db4-4deb-9340-387064220193" containerID="5f93bf96c62c798017171bb621e50daedce3421575d015b275a73e13023326ea" exitCode=0 Dec 05 17:52:58 crc kubenswrapper[4961]: I1205 17:52:58.993002 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5fc252b7-4db4-4deb-9340-387064220193","Type":"ContainerDied","Data":"2f3178e96a90b82d41d58f7d069bd95649dde5a8a48af8e84a76fd9e4282116b"} Dec 05 17:52:58 crc kubenswrapper[4961]: I1205 17:52:58.993037 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5fc252b7-4db4-4deb-9340-387064220193","Type":"ContainerDied","Data":"7244ee1f9dc8e048a969fa37ec6d010b86df1682c7fbf36021b969fdd159a22b"} Dec 05 17:52:58 crc kubenswrapper[4961]: I1205 17:52:58.993090 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5fc252b7-4db4-4deb-9340-387064220193","Type":"ContainerDied","Data":"5f93bf96c62c798017171bb621e50daedce3421575d015b275a73e13023326ea"} Dec 05 17:52:59 crc kubenswrapper[4961]: I1205 17:52:59.571647 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:52:59 crc kubenswrapper[4961]: I1205 17:52:59.679474 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5fc252b7-4db4-4deb-9340-387064220193-run-httpd\") pod \"5fc252b7-4db4-4deb-9340-387064220193\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " Dec 05 17:52:59 crc kubenswrapper[4961]: I1205 17:52:59.679573 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wltpr\" (UniqueName: \"kubernetes.io/projected/5fc252b7-4db4-4deb-9340-387064220193-kube-api-access-wltpr\") pod \"5fc252b7-4db4-4deb-9340-387064220193\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " Dec 05 17:52:59 crc kubenswrapper[4961]: I1205 17:52:59.679624 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fc252b7-4db4-4deb-9340-387064220193-combined-ca-bundle\") pod \"5fc252b7-4db4-4deb-9340-387064220193\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " Dec 05 17:52:59 crc kubenswrapper[4961]: I1205 17:52:59.679705 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fc252b7-4db4-4deb-9340-387064220193-scripts\") pod \"5fc252b7-4db4-4deb-9340-387064220193\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " Dec 05 17:52:59 crc kubenswrapper[4961]: I1205 17:52:59.679747 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5fc252b7-4db4-4deb-9340-387064220193-sg-core-conf-yaml\") pod \"5fc252b7-4db4-4deb-9340-387064220193\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " Dec 05 17:52:59 crc kubenswrapper[4961]: I1205 17:52:59.679883 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fc252b7-4db4-4deb-9340-387064220193-config-data\") pod \"5fc252b7-4db4-4deb-9340-387064220193\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " Dec 05 17:52:59 crc kubenswrapper[4961]: I1205 17:52:59.679933 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5fc252b7-4db4-4deb-9340-387064220193-log-httpd\") pod \"5fc252b7-4db4-4deb-9340-387064220193\" (UID: \"5fc252b7-4db4-4deb-9340-387064220193\") " Dec 05 17:52:59 crc kubenswrapper[4961]: I1205 17:52:59.680624 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5fc252b7-4db4-4deb-9340-387064220193-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "5fc252b7-4db4-4deb-9340-387064220193" (UID: "5fc252b7-4db4-4deb-9340-387064220193"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:52:59 crc kubenswrapper[4961]: I1205 17:52:59.680748 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5fc252b7-4db4-4deb-9340-387064220193-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "5fc252b7-4db4-4deb-9340-387064220193" (UID: "5fc252b7-4db4-4deb-9340-387064220193"). InnerVolumeSpecName "run-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:52:59 crc kubenswrapper[4961]: I1205 17:52:59.685569 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fc252b7-4db4-4deb-9340-387064220193-scripts" (OuterVolumeSpecName: "scripts") pod "5fc252b7-4db4-4deb-9340-387064220193" (UID: "5fc252b7-4db4-4deb-9340-387064220193"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:59 crc kubenswrapper[4961]: I1205 17:52:59.699211 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fc252b7-4db4-4deb-9340-387064220193-kube-api-access-wltpr" (OuterVolumeSpecName: "kube-api-access-wltpr") pod "5fc252b7-4db4-4deb-9340-387064220193" (UID: "5fc252b7-4db4-4deb-9340-387064220193"). InnerVolumeSpecName "kube-api-access-wltpr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:52:59 crc kubenswrapper[4961]: I1205 17:52:59.708552 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fc252b7-4db4-4deb-9340-387064220193-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "5fc252b7-4db4-4deb-9340-387064220193" (UID: "5fc252b7-4db4-4deb-9340-387064220193"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:59 crc kubenswrapper[4961]: I1205 17:52:59.755200 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fc252b7-4db4-4deb-9340-387064220193-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5fc252b7-4db4-4deb-9340-387064220193" (UID: "5fc252b7-4db4-4deb-9340-387064220193"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:59 crc kubenswrapper[4961]: I1205 17:52:59.782081 4961 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5fc252b7-4db4-4deb-9340-387064220193-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:59 crc kubenswrapper[4961]: I1205 17:52:59.782346 4961 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/5fc252b7-4db4-4deb-9340-387064220193-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:59 crc kubenswrapper[4961]: I1205 17:52:59.782439 4961 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5fc252b7-4db4-4deb-9340-387064220193-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:59 crc kubenswrapper[4961]: I1205 17:52:59.782550 4961 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/5fc252b7-4db4-4deb-9340-387064220193-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:59 crc kubenswrapper[4961]: I1205 17:52:59.782625 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wltpr\" (UniqueName: \"kubernetes.io/projected/5fc252b7-4db4-4deb-9340-387064220193-kube-api-access-wltpr\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:59 crc kubenswrapper[4961]: I1205 17:52:59.782718 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5fc252b7-4db4-4deb-9340-387064220193-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:52:59 crc kubenswrapper[4961]: I1205 17:52:59.788467 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for 
volume "kubernetes.io/secret/5fc252b7-4db4-4deb-9340-387064220193-config-data" (OuterVolumeSpecName: "config-data") pod "5fc252b7-4db4-4deb-9340-387064220193" (UID: "5fc252b7-4db4-4deb-9340-387064220193"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:52:59 crc kubenswrapper[4961]: I1205 17:52:59.884766 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5fc252b7-4db4-4deb-9340-387064220193-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.009126 4961 generic.go:334] "Generic (PLEG): container finished" podID="5fc252b7-4db4-4deb-9340-387064220193" containerID="62d24f251817bdd0dcecd12f4c6a164e2203dd2650c944b441bb5911ecb89729" exitCode=0 Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.009811 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5fc252b7-4db4-4deb-9340-387064220193","Type":"ContainerDied","Data":"62d24f251817bdd0dcecd12f4c6a164e2203dd2650c944b441bb5911ecb89729"} Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.009900 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.010744 4961 scope.go:117] "RemoveContainer" containerID="2f3178e96a90b82d41d58f7d069bd95649dde5a8a48af8e84a76fd9e4282116b" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.010620 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"5fc252b7-4db4-4deb-9340-387064220193","Type":"ContainerDied","Data":"5e03fedf3a86894f541490124a0a7a3b7eb3c3c9b07eb5ec685443374a50878d"} Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.047029 4961 scope.go:117] "RemoveContainer" containerID="7244ee1f9dc8e048a969fa37ec6d010b86df1682c7fbf36021b969fdd159a22b" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.074740 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.083636 4961 scope.go:117] "RemoveContainer" containerID="62d24f251817bdd0dcecd12f4c6a164e2203dd2650c944b441bb5911ecb89729" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.088793 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.103180 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:53:00 crc kubenswrapper[4961]: E1205 17:53:00.103851 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fc252b7-4db4-4deb-9340-387064220193" containerName="sg-core" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.103878 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fc252b7-4db4-4deb-9340-387064220193" containerName="sg-core" Dec 05 17:53:00 crc kubenswrapper[4961]: E1205 17:53:00.103907 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fc252b7-4db4-4deb-9340-387064220193" containerName="ceilometer-central-agent" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.103915 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fc252b7-4db4-4deb-9340-387064220193" containerName="ceilometer-central-agent" Dec 05 17:53:00 crc kubenswrapper[4961]: E1205 17:53:00.103928 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fc252b7-4db4-4deb-9340-387064220193" 
containerName="ceilometer-notification-agent" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.103935 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fc252b7-4db4-4deb-9340-387064220193" containerName="ceilometer-notification-agent" Dec 05 17:53:00 crc kubenswrapper[4961]: E1205 17:53:00.103958 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5fc252b7-4db4-4deb-9340-387064220193" containerName="proxy-httpd" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.103965 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="5fc252b7-4db4-4deb-9340-387064220193" containerName="proxy-httpd" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.104188 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fc252b7-4db4-4deb-9340-387064220193" containerName="ceilometer-notification-agent" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.104224 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fc252b7-4db4-4deb-9340-387064220193" containerName="sg-core" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.104238 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fc252b7-4db4-4deb-9340-387064220193" containerName="proxy-httpd" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.104247 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="5fc252b7-4db4-4deb-9340-387064220193" containerName="ceilometer-central-agent" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.106913 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.108718 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.109157 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.113660 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.147059 4961 scope.go:117] "RemoveContainer" containerID="5f93bf96c62c798017171bb621e50daedce3421575d015b275a73e13023326ea" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.166574 4961 scope.go:117] "RemoveContainer" containerID="2f3178e96a90b82d41d58f7d069bd95649dde5a8a48af8e84a76fd9e4282116b" Dec 05 17:53:00 crc kubenswrapper[4961]: E1205 17:53:00.167604 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f3178e96a90b82d41d58f7d069bd95649dde5a8a48af8e84a76fd9e4282116b\": container with ID starting with 2f3178e96a90b82d41d58f7d069bd95649dde5a8a48af8e84a76fd9e4282116b not found: ID does not exist" containerID="2f3178e96a90b82d41d58f7d069bd95649dde5a8a48af8e84a76fd9e4282116b" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.167639 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2f3178e96a90b82d41d58f7d069bd95649dde5a8a48af8e84a76fd9e4282116b"} err="failed to get container status \"2f3178e96a90b82d41d58f7d069bd95649dde5a8a48af8e84a76fd9e4282116b\": rpc error: code = NotFound desc = could not find container \"2f3178e96a90b82d41d58f7d069bd95649dde5a8a48af8e84a76fd9e4282116b\": container with ID starting with 2f3178e96a90b82d41d58f7d069bd95649dde5a8a48af8e84a76fd9e4282116b not found: ID does not exist" Dec 05 17:53:00 crc 
kubenswrapper[4961]: I1205 17:53:00.167916 4961 scope.go:117] "RemoveContainer" containerID="7244ee1f9dc8e048a969fa37ec6d010b86df1682c7fbf36021b969fdd159a22b" Dec 05 17:53:00 crc kubenswrapper[4961]: E1205 17:53:00.168293 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7244ee1f9dc8e048a969fa37ec6d010b86df1682c7fbf36021b969fdd159a22b\": container with ID starting with 7244ee1f9dc8e048a969fa37ec6d010b86df1682c7fbf36021b969fdd159a22b not found: ID does not exist" containerID="7244ee1f9dc8e048a969fa37ec6d010b86df1682c7fbf36021b969fdd159a22b" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.168322 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7244ee1f9dc8e048a969fa37ec6d010b86df1682c7fbf36021b969fdd159a22b"} err="failed to get container status \"7244ee1f9dc8e048a969fa37ec6d010b86df1682c7fbf36021b969fdd159a22b\": rpc error: code = NotFound desc = could not find container \"7244ee1f9dc8e048a969fa37ec6d010b86df1682c7fbf36021b969fdd159a22b\": container with ID starting with 7244ee1f9dc8e048a969fa37ec6d010b86df1682c7fbf36021b969fdd159a22b not found: ID does not exist" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.168335 4961 scope.go:117] "RemoveContainer" containerID="62d24f251817bdd0dcecd12f4c6a164e2203dd2650c944b441bb5911ecb89729" Dec 05 17:53:00 crc kubenswrapper[4961]: E1205 17:53:00.168575 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62d24f251817bdd0dcecd12f4c6a164e2203dd2650c944b441bb5911ecb89729\": container with ID starting with 62d24f251817bdd0dcecd12f4c6a164e2203dd2650c944b441bb5911ecb89729 not found: ID does not exist" containerID="62d24f251817bdd0dcecd12f4c6a164e2203dd2650c944b441bb5911ecb89729" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.168603 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62d24f251817bdd0dcecd12f4c6a164e2203dd2650c944b441bb5911ecb89729"} err="failed to get container status \"62d24f251817bdd0dcecd12f4c6a164e2203dd2650c944b441bb5911ecb89729\": rpc error: code = NotFound desc = could not find container \"62d24f251817bdd0dcecd12f4c6a164e2203dd2650c944b441bb5911ecb89729\": container with ID starting with 62d24f251817bdd0dcecd12f4c6a164e2203dd2650c944b441bb5911ecb89729 not found: ID does not exist" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.168615 4961 scope.go:117] "RemoveContainer" containerID="5f93bf96c62c798017171bb621e50daedce3421575d015b275a73e13023326ea" Dec 05 17:53:00 crc kubenswrapper[4961]: E1205 17:53:00.168806 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5f93bf96c62c798017171bb621e50daedce3421575d015b275a73e13023326ea\": container with ID starting with 5f93bf96c62c798017171bb621e50daedce3421575d015b275a73e13023326ea not found: ID does not exist" containerID="5f93bf96c62c798017171bb621e50daedce3421575d015b275a73e13023326ea" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.168833 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5f93bf96c62c798017171bb621e50daedce3421575d015b275a73e13023326ea"} err="failed to get container status \"5f93bf96c62c798017171bb621e50daedce3421575d015b275a73e13023326ea\": rpc error: code = NotFound desc = could not find container 
\"5f93bf96c62c798017171bb621e50daedce3421575d015b275a73e13023326ea\": container with ID starting with 5f93bf96c62c798017171bb621e50daedce3421575d015b275a73e13023326ea not found: ID does not exist" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.292970 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40d6bdd6-246b-487a-abf4-5e44db189027-log-httpd\") pod \"ceilometer-0\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " pod="openstack/ceilometer-0" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.293066 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40d6bdd6-246b-487a-abf4-5e44db189027-config-data\") pod \"ceilometer-0\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " pod="openstack/ceilometer-0" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.293092 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40d6bdd6-246b-487a-abf4-5e44db189027-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " pod="openstack/ceilometer-0" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.293341 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40d6bdd6-246b-487a-abf4-5e44db189027-run-httpd\") pod \"ceilometer-0\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " pod="openstack/ceilometer-0" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.293564 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40d6bdd6-246b-487a-abf4-5e44db189027-scripts\") pod \"ceilometer-0\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " pod="openstack/ceilometer-0" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.293602 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-24flb\" (UniqueName: \"kubernetes.io/projected/40d6bdd6-246b-487a-abf4-5e44db189027-kube-api-access-24flb\") pod \"ceilometer-0\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " pod="openstack/ceilometer-0" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.293835 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/40d6bdd6-246b-487a-abf4-5e44db189027-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " pod="openstack/ceilometer-0" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.395086 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40d6bdd6-246b-487a-abf4-5e44db189027-run-httpd\") pod \"ceilometer-0\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " pod="openstack/ceilometer-0" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.395198 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40d6bdd6-246b-487a-abf4-5e44db189027-scripts\") pod \"ceilometer-0\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " pod="openstack/ceilometer-0" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.395226 
4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-24flb\" (UniqueName: \"kubernetes.io/projected/40d6bdd6-246b-487a-abf4-5e44db189027-kube-api-access-24flb\") pod \"ceilometer-0\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " pod="openstack/ceilometer-0" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.395294 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/40d6bdd6-246b-487a-abf4-5e44db189027-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " pod="openstack/ceilometer-0" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.395335 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40d6bdd6-246b-487a-abf4-5e44db189027-log-httpd\") pod \"ceilometer-0\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " pod="openstack/ceilometer-0" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.395364 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40d6bdd6-246b-487a-abf4-5e44db189027-config-data\") pod \"ceilometer-0\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " pod="openstack/ceilometer-0" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.395382 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40d6bdd6-246b-487a-abf4-5e44db189027-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " pod="openstack/ceilometer-0" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.395627 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40d6bdd6-246b-487a-abf4-5e44db189027-run-httpd\") pod \"ceilometer-0\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " pod="openstack/ceilometer-0" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.395903 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40d6bdd6-246b-487a-abf4-5e44db189027-log-httpd\") pod \"ceilometer-0\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " pod="openstack/ceilometer-0" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.401374 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/40d6bdd6-246b-487a-abf4-5e44db189027-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " pod="openstack/ceilometer-0" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.401982 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40d6bdd6-246b-487a-abf4-5e44db189027-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " pod="openstack/ceilometer-0" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.403219 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40d6bdd6-246b-487a-abf4-5e44db189027-scripts\") pod \"ceilometer-0\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " pod="openstack/ceilometer-0" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.416511 4961 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40d6bdd6-246b-487a-abf4-5e44db189027-config-data\") pod \"ceilometer-0\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " pod="openstack/ceilometer-0" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.417644 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-24flb\" (UniqueName: \"kubernetes.io/projected/40d6bdd6-246b-487a-abf4-5e44db189027-kube-api-access-24flb\") pod \"ceilometer-0\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " pod="openstack/ceilometer-0" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.441061 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.874646 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fc252b7-4db4-4deb-9340-387064220193" path="/var/lib/kubelet/pods/5fc252b7-4db4-4deb-9340-387064220193/volumes" Dec 05 17:53:00 crc kubenswrapper[4961]: I1205 17:53:00.952285 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:53:00 crc kubenswrapper[4961]: W1205 17:53:00.960699 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod40d6bdd6_246b_487a_abf4_5e44db189027.slice/crio-6558e03de38533ed6b09b03f1ba7710f172eb9929837deac172a8a62f4263a52 WatchSource:0}: Error finding container 6558e03de38533ed6b09b03f1ba7710f172eb9929837deac172a8a62f4263a52: Status 404 returned error can't find the container with id 6558e03de38533ed6b09b03f1ba7710f172eb9929837deac172a8a62f4263a52 Dec 05 17:53:01 crc kubenswrapper[4961]: I1205 17:53:01.025291 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40d6bdd6-246b-487a-abf4-5e44db189027","Type":"ContainerStarted","Data":"6558e03de38533ed6b09b03f1ba7710f172eb9929837deac172a8a62f4263a52"} Dec 05 17:53:02 crc kubenswrapper[4961]: I1205 17:53:02.039354 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40d6bdd6-246b-487a-abf4-5e44db189027","Type":"ContainerStarted","Data":"85615c8f9b33de72b8c1a8439d7660a9dce959f974ffa94a73e8d694f21a4d40"} Dec 05 17:53:03 crc kubenswrapper[4961]: I1205 17:53:03.051012 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40d6bdd6-246b-487a-abf4-5e44db189027","Type":"ContainerStarted","Data":"264cfac70a49319869b757b9c8eeb9b8e140c1411e568d2b0ca153334ae3e756"} Dec 05 17:53:03 crc kubenswrapper[4961]: I1205 17:53:03.051293 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40d6bdd6-246b-487a-abf4-5e44db189027","Type":"ContainerStarted","Data":"cca0ce3f6fe1dda59157fb358f3ed4e0427c0244cd431a08908adb017129ddec"} Dec 05 17:53:05 crc kubenswrapper[4961]: I1205 17:53:05.070190 4961 generic.go:334] "Generic (PLEG): container finished" podID="48ded27c-f907-4876-899b-236f2b1aa906" containerID="352294156f014497b9fe3a11132400266f168e51a879bebf89d68908807fa945" exitCode=0 Dec 05 17:53:05 crc kubenswrapper[4961]: I1205 17:53:05.070301 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-6nzdc" event={"ID":"48ded27c-f907-4876-899b-236f2b1aa906","Type":"ContainerDied","Data":"352294156f014497b9fe3a11132400266f168e51a879bebf89d68908807fa945"} Dec 05 17:53:05 crc kubenswrapper[4961]: I1205 17:53:05.074442 4961 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40d6bdd6-246b-487a-abf4-5e44db189027","Type":"ContainerStarted","Data":"f5713dc6ab50474397330b8581a605c5b32eb07f1e28a62c851e0f0d544c9837"} Dec 05 17:53:05 crc kubenswrapper[4961]: I1205 17:53:05.074597 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 17:53:05 crc kubenswrapper[4961]: I1205 17:53:05.116149 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.118489116 podStartE2EDuration="5.116130365s" podCreationTimestamp="2025-12-05 17:53:00 +0000 UTC" firstStartedPulling="2025-12-05 17:53:00.962470696 +0000 UTC m=+1187.023621169" lastFinishedPulling="2025-12-05 17:53:03.960111945 +0000 UTC m=+1190.021262418" observedRunningTime="2025-12-05 17:53:05.106588978 +0000 UTC m=+1191.167739461" watchObservedRunningTime="2025-12-05 17:53:05.116130365 +0000 UTC m=+1191.177280838" Dec 05 17:53:06 crc kubenswrapper[4961]: I1205 17:53:06.450452 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-6nzdc" Dec 05 17:53:06 crc kubenswrapper[4961]: I1205 17:53:06.634638 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48ded27c-f907-4876-899b-236f2b1aa906-config-data\") pod \"48ded27c-f907-4876-899b-236f2b1aa906\" (UID: \"48ded27c-f907-4876-899b-236f2b1aa906\") " Dec 05 17:53:06 crc kubenswrapper[4961]: I1205 17:53:06.634789 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48ded27c-f907-4876-899b-236f2b1aa906-combined-ca-bundle\") pod \"48ded27c-f907-4876-899b-236f2b1aa906\" (UID: \"48ded27c-f907-4876-899b-236f2b1aa906\") " Dec 05 17:53:06 crc kubenswrapper[4961]: I1205 17:53:06.635033 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jh58q\" (UniqueName: \"kubernetes.io/projected/48ded27c-f907-4876-899b-236f2b1aa906-kube-api-access-jh58q\") pod \"48ded27c-f907-4876-899b-236f2b1aa906\" (UID: \"48ded27c-f907-4876-899b-236f2b1aa906\") " Dec 05 17:53:06 crc kubenswrapper[4961]: I1205 17:53:06.635155 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48ded27c-f907-4876-899b-236f2b1aa906-scripts\") pod \"48ded27c-f907-4876-899b-236f2b1aa906\" (UID: \"48ded27c-f907-4876-899b-236f2b1aa906\") " Dec 05 17:53:06 crc kubenswrapper[4961]: I1205 17:53:06.640683 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/48ded27c-f907-4876-899b-236f2b1aa906-kube-api-access-jh58q" (OuterVolumeSpecName: "kube-api-access-jh58q") pod "48ded27c-f907-4876-899b-236f2b1aa906" (UID: "48ded27c-f907-4876-899b-236f2b1aa906"). InnerVolumeSpecName "kube-api-access-jh58q". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:53:06 crc kubenswrapper[4961]: I1205 17:53:06.657106 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48ded27c-f907-4876-899b-236f2b1aa906-scripts" (OuterVolumeSpecName: "scripts") pod "48ded27c-f907-4876-899b-236f2b1aa906" (UID: "48ded27c-f907-4876-899b-236f2b1aa906"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:53:06 crc kubenswrapper[4961]: I1205 17:53:06.663253 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48ded27c-f907-4876-899b-236f2b1aa906-config-data" (OuterVolumeSpecName: "config-data") pod "48ded27c-f907-4876-899b-236f2b1aa906" (UID: "48ded27c-f907-4876-899b-236f2b1aa906"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:53:06 crc kubenswrapper[4961]: I1205 17:53:06.669702 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/48ded27c-f907-4876-899b-236f2b1aa906-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "48ded27c-f907-4876-899b-236f2b1aa906" (UID: "48ded27c-f907-4876-899b-236f2b1aa906"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:53:06 crc kubenswrapper[4961]: I1205 17:53:06.737144 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/48ded27c-f907-4876-899b-236f2b1aa906-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:53:06 crc kubenswrapper[4961]: I1205 17:53:06.737175 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/48ded27c-f907-4876-899b-236f2b1aa906-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:53:06 crc kubenswrapper[4961]: I1205 17:53:06.737187 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jh58q\" (UniqueName: \"kubernetes.io/projected/48ded27c-f907-4876-899b-236f2b1aa906-kube-api-access-jh58q\") on node \"crc\" DevicePath \"\"" Dec 05 17:53:06 crc kubenswrapper[4961]: I1205 17:53:06.737196 4961 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/48ded27c-f907-4876-899b-236f2b1aa906-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:53:07 crc kubenswrapper[4961]: I1205 17:53:07.097194 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-db-sync-6nzdc" event={"ID":"48ded27c-f907-4876-899b-236f2b1aa906","Type":"ContainerDied","Data":"48f7983ec34a960e3dc525cbc23b4ba464f26579f60291b94435362119bc8d3e"} Dec 05 17:53:07 crc kubenswrapper[4961]: I1205 17:53:07.097565 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="48f7983ec34a960e3dc525cbc23b4ba464f26579f60291b94435362119bc8d3e" Dec 05 17:53:07 crc kubenswrapper[4961]: I1205 17:53:07.097296 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell0-conductor-db-sync-6nzdc" Dec 05 17:53:07 crc kubenswrapper[4961]: I1205 17:53:07.203490 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 17:53:07 crc kubenswrapper[4961]: E1205 17:53:07.204009 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="48ded27c-f907-4876-899b-236f2b1aa906" containerName="nova-cell0-conductor-db-sync" Dec 05 17:53:07 crc kubenswrapper[4961]: I1205 17:53:07.204031 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="48ded27c-f907-4876-899b-236f2b1aa906" containerName="nova-cell0-conductor-db-sync" Dec 05 17:53:07 crc kubenswrapper[4961]: I1205 17:53:07.204260 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="48ded27c-f907-4876-899b-236f2b1aa906" containerName="nova-cell0-conductor-db-sync" Dec 05 17:53:07 crc kubenswrapper[4961]: I1205 17:53:07.204971 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 17:53:07 crc kubenswrapper[4961]: I1205 17:53:07.207620 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-conductor-config-data" Dec 05 17:53:07 crc kubenswrapper[4961]: I1205 17:53:07.207978 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-nova-dockercfg-v5jfr" Dec 05 17:53:07 crc kubenswrapper[4961]: I1205 17:53:07.230633 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 17:53:07 crc kubenswrapper[4961]: I1205 17:53:07.348815 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8e29a1c-419b-4ab0-84ca-b87652bf1812-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b8e29a1c-419b-4ab0-84ca-b87652bf1812\") " pod="openstack/nova-cell0-conductor-0" Dec 05 17:53:07 crc kubenswrapper[4961]: I1205 17:53:07.349290 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8e29a1c-419b-4ab0-84ca-b87652bf1812-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"b8e29a1c-419b-4ab0-84ca-b87652bf1812\") " pod="openstack/nova-cell0-conductor-0" Dec 05 17:53:07 crc kubenswrapper[4961]: I1205 17:53:07.349597 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xg9sh\" (UniqueName: \"kubernetes.io/projected/b8e29a1c-419b-4ab0-84ca-b87652bf1812-kube-api-access-xg9sh\") pod \"nova-cell0-conductor-0\" (UID: \"b8e29a1c-419b-4ab0-84ca-b87652bf1812\") " pod="openstack/nova-cell0-conductor-0" Dec 05 17:53:07 crc kubenswrapper[4961]: I1205 17:53:07.451373 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8e29a1c-419b-4ab0-84ca-b87652bf1812-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"b8e29a1c-419b-4ab0-84ca-b87652bf1812\") " pod="openstack/nova-cell0-conductor-0" Dec 05 17:53:07 crc kubenswrapper[4961]: I1205 17:53:07.451766 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xg9sh\" (UniqueName: \"kubernetes.io/projected/b8e29a1c-419b-4ab0-84ca-b87652bf1812-kube-api-access-xg9sh\") pod \"nova-cell0-conductor-0\" (UID: \"b8e29a1c-419b-4ab0-84ca-b87652bf1812\") " pod="openstack/nova-cell0-conductor-0" Dec 05 17:53:07 crc kubenswrapper[4961]: 
I1205 17:53:07.451891 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8e29a1c-419b-4ab0-84ca-b87652bf1812-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b8e29a1c-419b-4ab0-84ca-b87652bf1812\") " pod="openstack/nova-cell0-conductor-0" Dec 05 17:53:07 crc kubenswrapper[4961]: I1205 17:53:07.458409 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8e29a1c-419b-4ab0-84ca-b87652bf1812-combined-ca-bundle\") pod \"nova-cell0-conductor-0\" (UID: \"b8e29a1c-419b-4ab0-84ca-b87652bf1812\") " pod="openstack/nova-cell0-conductor-0" Dec 05 17:53:07 crc kubenswrapper[4961]: I1205 17:53:07.462063 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b8e29a1c-419b-4ab0-84ca-b87652bf1812-config-data\") pod \"nova-cell0-conductor-0\" (UID: \"b8e29a1c-419b-4ab0-84ca-b87652bf1812\") " pod="openstack/nova-cell0-conductor-0" Dec 05 17:53:07 crc kubenswrapper[4961]: I1205 17:53:07.473118 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xg9sh\" (UniqueName: \"kubernetes.io/projected/b8e29a1c-419b-4ab0-84ca-b87652bf1812-kube-api-access-xg9sh\") pod \"nova-cell0-conductor-0\" (UID: \"b8e29a1c-419b-4ab0-84ca-b87652bf1812\") " pod="openstack/nova-cell0-conductor-0" Dec 05 17:53:07 crc kubenswrapper[4961]: I1205 17:53:07.523009 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-conductor-0" Dec 05 17:53:07 crc kubenswrapper[4961]: I1205 17:53:07.990103 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-conductor-0"] Dec 05 17:53:08 crc kubenswrapper[4961]: W1205 17:53:08.001140 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb8e29a1c_419b_4ab0_84ca_b87652bf1812.slice/crio-041a517d8438c3bf7c4488e154dfb1904d65831aa057ea5346356f9ff9582f3d WatchSource:0}: Error finding container 041a517d8438c3bf7c4488e154dfb1904d65831aa057ea5346356f9ff9582f3d: Status 404 returned error can't find the container with id 041a517d8438c3bf7c4488e154dfb1904d65831aa057ea5346356f9ff9582f3d Dec 05 17:53:08 crc kubenswrapper[4961]: I1205 17:53:08.120123 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"b8e29a1c-419b-4ab0-84ca-b87652bf1812","Type":"ContainerStarted","Data":"041a517d8438c3bf7c4488e154dfb1904d65831aa057ea5346356f9ff9582f3d"} Dec 05 17:53:09 crc kubenswrapper[4961]: I1205 17:53:09.130804 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-conductor-0" event={"ID":"b8e29a1c-419b-4ab0-84ca-b87652bf1812","Type":"ContainerStarted","Data":"829255351b8168b5d068328f61cbed5703138e01f468350d49e73d5a78226b21"} Dec 05 17:53:09 crc kubenswrapper[4961]: I1205 17:53:09.130986 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell0-conductor-0" Dec 05 17:53:09 crc kubenswrapper[4961]: I1205 17:53:09.150619 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-conductor-0" podStartSLOduration=2.150603544 podStartE2EDuration="2.150603544s" podCreationTimestamp="2025-12-05 17:53:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 
17:53:09.14598351 +0000 UTC m=+1195.207134003" watchObservedRunningTime="2025-12-05 17:53:09.150603544 +0000 UTC m=+1195.211754017" Dec 05 17:53:17 crc kubenswrapper[4961]: I1205 17:53:17.556098 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell0-conductor-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.075081 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell0-cell-mapping-8k9zj"] Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.082937 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-8k9zj" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.086182 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-scripts" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.087249 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell0-manage-config-data" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.100736 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-8k9zj"] Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.146731 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bdwwd\" (UniqueName: \"kubernetes.io/projected/85bec0ac-6b6a-4532-a810-f468dded1d0e-kube-api-access-bdwwd\") pod \"nova-cell0-cell-mapping-8k9zj\" (UID: \"85bec0ac-6b6a-4532-a810-f468dded1d0e\") " pod="openstack/nova-cell0-cell-mapping-8k9zj" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.146811 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85bec0ac-6b6a-4532-a810-f468dded1d0e-scripts\") pod \"nova-cell0-cell-mapping-8k9zj\" (UID: \"85bec0ac-6b6a-4532-a810-f468dded1d0e\") " pod="openstack/nova-cell0-cell-mapping-8k9zj" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.146909 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85bec0ac-6b6a-4532-a810-f468dded1d0e-config-data\") pod \"nova-cell0-cell-mapping-8k9zj\" (UID: \"85bec0ac-6b6a-4532-a810-f468dded1d0e\") " pod="openstack/nova-cell0-cell-mapping-8k9zj" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.146953 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85bec0ac-6b6a-4532-a810-f468dded1d0e-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-8k9zj\" (UID: \"85bec0ac-6b6a-4532-a810-f468dded1d0e\") " pod="openstack/nova-cell0-cell-mapping-8k9zj" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.250623 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bdwwd\" (UniqueName: \"kubernetes.io/projected/85bec0ac-6b6a-4532-a810-f468dded1d0e-kube-api-access-bdwwd\") pod \"nova-cell0-cell-mapping-8k9zj\" (UID: \"85bec0ac-6b6a-4532-a810-f468dded1d0e\") " pod="openstack/nova-cell0-cell-mapping-8k9zj" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.250688 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85bec0ac-6b6a-4532-a810-f468dded1d0e-scripts\") pod \"nova-cell0-cell-mapping-8k9zj\" (UID: \"85bec0ac-6b6a-4532-a810-f468dded1d0e\") " 
pod="openstack/nova-cell0-cell-mapping-8k9zj" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.250810 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85bec0ac-6b6a-4532-a810-f468dded1d0e-config-data\") pod \"nova-cell0-cell-mapping-8k9zj\" (UID: \"85bec0ac-6b6a-4532-a810-f468dded1d0e\") " pod="openstack/nova-cell0-cell-mapping-8k9zj" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.250863 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85bec0ac-6b6a-4532-a810-f468dded1d0e-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-8k9zj\" (UID: \"85bec0ac-6b6a-4532-a810-f468dded1d0e\") " pod="openstack/nova-cell0-cell-mapping-8k9zj" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.262489 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85bec0ac-6b6a-4532-a810-f468dded1d0e-combined-ca-bundle\") pod \"nova-cell0-cell-mapping-8k9zj\" (UID: \"85bec0ac-6b6a-4532-a810-f468dded1d0e\") " pod="openstack/nova-cell0-cell-mapping-8k9zj" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.265285 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85bec0ac-6b6a-4532-a810-f468dded1d0e-config-data\") pod \"nova-cell0-cell-mapping-8k9zj\" (UID: \"85bec0ac-6b6a-4532-a810-f468dded1d0e\") " pod="openstack/nova-cell0-cell-mapping-8k9zj" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.271391 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85bec0ac-6b6a-4532-a810-f468dded1d0e-scripts\") pod \"nova-cell0-cell-mapping-8k9zj\" (UID: \"85bec0ac-6b6a-4532-a810-f468dded1d0e\") " pod="openstack/nova-cell0-cell-mapping-8k9zj" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.277080 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.278530 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.291181 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.306753 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bdwwd\" (UniqueName: \"kubernetes.io/projected/85bec0ac-6b6a-4532-a810-f468dded1d0e-kube-api-access-bdwwd\") pod \"nova-cell0-cell-mapping-8k9zj\" (UID: \"85bec0ac-6b6a-4532-a810-f468dded1d0e\") " pod="openstack/nova-cell0-cell-mapping-8k9zj" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.341149 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.358345 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.359983 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.362804 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.395659 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.442259 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-8k9zj" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.455468 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/618faf17-2f1c-408f-82bd-ddee38044c18-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"618faf17-2f1c-408f-82bd-ddee38044c18\") " pod="openstack/nova-api-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.455524 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bfd30f6-3c39-4fb5-a918-439eb03161d5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0bfd30f6-3c39-4fb5-a918-439eb03161d5\") " pod="openstack/nova-scheduler-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.455555 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c47b8\" (UniqueName: \"kubernetes.io/projected/618faf17-2f1c-408f-82bd-ddee38044c18-kube-api-access-c47b8\") pod \"nova-api-0\" (UID: \"618faf17-2f1c-408f-82bd-ddee38044c18\") " pod="openstack/nova-api-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.455575 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/618faf17-2f1c-408f-82bd-ddee38044c18-logs\") pod \"nova-api-0\" (UID: \"618faf17-2f1c-408f-82bd-ddee38044c18\") " pod="openstack/nova-api-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.455640 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tsdl\" (UniqueName: \"kubernetes.io/projected/0bfd30f6-3c39-4fb5-a918-439eb03161d5-kube-api-access-4tsdl\") pod \"nova-scheduler-0\" (UID: \"0bfd30f6-3c39-4fb5-a918-439eb03161d5\") " pod="openstack/nova-scheduler-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.455669 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0bfd30f6-3c39-4fb5-a918-439eb03161d5-config-data\") pod \"nova-scheduler-0\" (UID: \"0bfd30f6-3c39-4fb5-a918-439eb03161d5\") " pod="openstack/nova-scheduler-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.455691 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/618faf17-2f1c-408f-82bd-ddee38044c18-config-data\") pod \"nova-api-0\" (UID: \"618faf17-2f1c-408f-82bd-ddee38044c18\") " pod="openstack/nova-api-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.512787 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.514324 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.519941 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.558477 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/618faf17-2f1c-408f-82bd-ddee38044c18-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"618faf17-2f1c-408f-82bd-ddee38044c18\") " pod="openstack/nova-api-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.558527 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bfd30f6-3c39-4fb5-a918-439eb03161d5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0bfd30f6-3c39-4fb5-a918-439eb03161d5\") " pod="openstack/nova-scheduler-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.558556 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c47b8\" (UniqueName: \"kubernetes.io/projected/618faf17-2f1c-408f-82bd-ddee38044c18-kube-api-access-c47b8\") pod \"nova-api-0\" (UID: \"618faf17-2f1c-408f-82bd-ddee38044c18\") " pod="openstack/nova-api-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.558577 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/618faf17-2f1c-408f-82bd-ddee38044c18-logs\") pod \"nova-api-0\" (UID: \"618faf17-2f1c-408f-82bd-ddee38044c18\") " pod="openstack/nova-api-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.558641 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tsdl\" (UniqueName: \"kubernetes.io/projected/0bfd30f6-3c39-4fb5-a918-439eb03161d5-kube-api-access-4tsdl\") pod \"nova-scheduler-0\" (UID: \"0bfd30f6-3c39-4fb5-a918-439eb03161d5\") " pod="openstack/nova-scheduler-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.558669 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0bfd30f6-3c39-4fb5-a918-439eb03161d5-config-data\") pod \"nova-scheduler-0\" (UID: \"0bfd30f6-3c39-4fb5-a918-439eb03161d5\") " pod="openstack/nova-scheduler-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.558718 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/618faf17-2f1c-408f-82bd-ddee38044c18-config-data\") pod \"nova-api-0\" (UID: \"618faf17-2f1c-408f-82bd-ddee38044c18\") " pod="openstack/nova-api-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.562262 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.563701 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/618faf17-2f1c-408f-82bd-ddee38044c18-logs\") pod \"nova-api-0\" (UID: \"618faf17-2f1c-408f-82bd-ddee38044c18\") " pod="openstack/nova-api-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.565277 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/618faf17-2f1c-408f-82bd-ddee38044c18-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"618faf17-2f1c-408f-82bd-ddee38044c18\") " 
pod="openstack/nova-api-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.574227 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bfd30f6-3c39-4fb5-a918-439eb03161d5-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"0bfd30f6-3c39-4fb5-a918-439eb03161d5\") " pod="openstack/nova-scheduler-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.576106 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/618faf17-2f1c-408f-82bd-ddee38044c18-config-data\") pod \"nova-api-0\" (UID: \"618faf17-2f1c-408f-82bd-ddee38044c18\") " pod="openstack/nova-api-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.596309 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0bfd30f6-3c39-4fb5-a918-439eb03161d5-config-data\") pod \"nova-scheduler-0\" (UID: \"0bfd30f6-3c39-4fb5-a918-439eb03161d5\") " pod="openstack/nova-scheduler-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.598401 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c47b8\" (UniqueName: \"kubernetes.io/projected/618faf17-2f1c-408f-82bd-ddee38044c18-kube-api-access-c47b8\") pod \"nova-api-0\" (UID: \"618faf17-2f1c-408f-82bd-ddee38044c18\") " pod="openstack/nova-api-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.618514 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-wcz6s"] Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.620314 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.623431 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tsdl\" (UniqueName: \"kubernetes.io/projected/0bfd30f6-3c39-4fb5-a918-439eb03161d5-kube-api-access-4tsdl\") pod \"nova-scheduler-0\" (UID: \"0bfd30f6-3c39-4fb5-a918-439eb03161d5\") " pod="openstack/nova-scheduler-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.634903 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-wcz6s"] Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.661796 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qt97z\" (UniqueName: \"kubernetes.io/projected/78e88132-9629-46fb-961f-87e507341bb7-kube-api-access-qt97z\") pod \"nova-metadata-0\" (UID: \"78e88132-9629-46fb-961f-87e507341bb7\") " pod="openstack/nova-metadata-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.661869 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78e88132-9629-46fb-961f-87e507341bb7-config-data\") pod \"nova-metadata-0\" (UID: \"78e88132-9629-46fb-961f-87e507341bb7\") " pod="openstack/nova-metadata-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.661920 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/78e88132-9629-46fb-961f-87e507341bb7-logs\") pod \"nova-metadata-0\" (UID: \"78e88132-9629-46fb-961f-87e507341bb7\") " pod="openstack/nova-metadata-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.661936 4961 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78e88132-9629-46fb-961f-87e507341bb7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"78e88132-9629-46fb-961f-87e507341bb7\") " pod="openstack/nova-metadata-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.686580 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.688160 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.689905 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.690375 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.706949 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.717031 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.765107 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-dns-svc\") pod \"dnsmasq-dns-865f5d856f-wcz6s\" (UID: \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\") " pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.765600 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-config\") pod \"dnsmasq-dns-865f5d856f-wcz6s\" (UID: \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\") " pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.765640 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/474ec1a8-a032-453c-aa79-d05a4cc04c0f-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"474ec1a8-a032-453c-aa79-d05a4cc04c0f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.765673 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qt97z\" (UniqueName: \"kubernetes.io/projected/78e88132-9629-46fb-961f-87e507341bb7-kube-api-access-qt97z\") pod \"nova-metadata-0\" (UID: \"78e88132-9629-46fb-961f-87e507341bb7\") " pod="openstack/nova-metadata-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.766498 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-77hl2\" (UniqueName: \"kubernetes.io/projected/474ec1a8-a032-453c-aa79-d05a4cc04c0f-kube-api-access-77hl2\") pod \"nova-cell1-novncproxy-0\" (UID: \"474ec1a8-a032-453c-aa79-d05a4cc04c0f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.766577 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-ovsdbserver-sb\") pod 
\"dnsmasq-dns-865f5d856f-wcz6s\" (UID: \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\") " pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.766615 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-wcz6s\" (UID: \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\") " pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.766639 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xn9j4\" (UniqueName: \"kubernetes.io/projected/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-kube-api-access-xn9j4\") pod \"dnsmasq-dns-865f5d856f-wcz6s\" (UID: \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\") " pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.766668 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78e88132-9629-46fb-961f-87e507341bb7-config-data\") pod \"nova-metadata-0\" (UID: \"78e88132-9629-46fb-961f-87e507341bb7\") " pod="openstack/nova-metadata-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.767594 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/78e88132-9629-46fb-961f-87e507341bb7-logs\") pod \"nova-metadata-0\" (UID: \"78e88132-9629-46fb-961f-87e507341bb7\") " pod="openstack/nova-metadata-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.767627 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78e88132-9629-46fb-961f-87e507341bb7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"78e88132-9629-46fb-961f-87e507341bb7\") " pod="openstack/nova-metadata-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.767886 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/474ec1a8-a032-453c-aa79-d05a4cc04c0f-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"474ec1a8-a032-453c-aa79-d05a4cc04c0f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.767936 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-wcz6s\" (UID: \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\") " pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.769933 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/78e88132-9629-46fb-961f-87e507341bb7-logs\") pod \"nova-metadata-0\" (UID: \"78e88132-9629-46fb-961f-87e507341bb7\") " pod="openstack/nova-metadata-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.779873 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78e88132-9629-46fb-961f-87e507341bb7-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"78e88132-9629-46fb-961f-87e507341bb7\") " pod="openstack/nova-metadata-0" Dec 05 17:53:18 
crc kubenswrapper[4961]: I1205 17:53:18.785971 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78e88132-9629-46fb-961f-87e507341bb7-config-data\") pod \"nova-metadata-0\" (UID: \"78e88132-9629-46fb-961f-87e507341bb7\") " pod="openstack/nova-metadata-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.790380 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qt97z\" (UniqueName: \"kubernetes.io/projected/78e88132-9629-46fb-961f-87e507341bb7-kube-api-access-qt97z\") pod \"nova-metadata-0\" (UID: \"78e88132-9629-46fb-961f-87e507341bb7\") " pod="openstack/nova-metadata-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.868599 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-config\") pod \"dnsmasq-dns-865f5d856f-wcz6s\" (UID: \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\") " pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.869538 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/474ec1a8-a032-453c-aa79-d05a4cc04c0f-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"474ec1a8-a032-453c-aa79-d05a4cc04c0f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.869572 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-77hl2\" (UniqueName: \"kubernetes.io/projected/474ec1a8-a032-453c-aa79-d05a4cc04c0f-kube-api-access-77hl2\") pod \"nova-cell1-novncproxy-0\" (UID: \"474ec1a8-a032-453c-aa79-d05a4cc04c0f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.869643 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-wcz6s\" (UID: \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\") " pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.869684 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-wcz6s\" (UID: \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\") " pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.869709 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xn9j4\" (UniqueName: \"kubernetes.io/projected/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-kube-api-access-xn9j4\") pod \"dnsmasq-dns-865f5d856f-wcz6s\" (UID: \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\") " pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.869494 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-config\") pod \"dnsmasq-dns-865f5d856f-wcz6s\" (UID: \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\") " pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.872425 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" 
(UniqueName: \"kubernetes.io/secret/474ec1a8-a032-453c-aa79-d05a4cc04c0f-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"474ec1a8-a032-453c-aa79-d05a4cc04c0f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.872485 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-wcz6s\" (UID: \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\") " pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.872545 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-dns-svc\") pod \"dnsmasq-dns-865f5d856f-wcz6s\" (UID: \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\") " pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.873281 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-dns-svc\") pod \"dnsmasq-dns-865f5d856f-wcz6s\" (UID: \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\") " pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.875881 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-ovsdbserver-nb\") pod \"dnsmasq-dns-865f5d856f-wcz6s\" (UID: \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\") " pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.877122 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-dns-swift-storage-0\") pod \"dnsmasq-dns-865f5d856f-wcz6s\" (UID: \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\") " pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.877251 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/474ec1a8-a032-453c-aa79-d05a4cc04c0f-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"474ec1a8-a032-453c-aa79-d05a4cc04c0f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.879758 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-ovsdbserver-sb\") pod \"dnsmasq-dns-865f5d856f-wcz6s\" (UID: \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\") " pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.888308 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/474ec1a8-a032-453c-aa79-d05a4cc04c0f-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"474ec1a8-a032-453c-aa79-d05a4cc04c0f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.908002 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-77hl2\" (UniqueName: \"kubernetes.io/projected/474ec1a8-a032-453c-aa79-d05a4cc04c0f-kube-api-access-77hl2\") pod \"nova-cell1-novncproxy-0\" (UID: 
\"474ec1a8-a032-453c-aa79-d05a4cc04c0f\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.908217 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xn9j4\" (UniqueName: \"kubernetes.io/projected/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-kube-api-access-xn9j4\") pod \"dnsmasq-dns-865f5d856f-wcz6s\" (UID: \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\") " pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" Dec 05 17:53:18 crc kubenswrapper[4961]: I1205 17:53:18.991616 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 17:53:19 crc kubenswrapper[4961]: I1205 17:53:19.018247 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" Dec 05 17:53:19 crc kubenswrapper[4961]: I1205 17:53:19.038819 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:19 crc kubenswrapper[4961]: I1205 17:53:19.272146 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell0-cell-mapping-8k9zj"] Dec 05 17:53:19 crc kubenswrapper[4961]: I1205 17:53:19.468852 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 17:53:19 crc kubenswrapper[4961]: I1205 17:53:19.482123 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-db-sync-dxcw5"] Dec 05 17:53:19 crc kubenswrapper[4961]: I1205 17:53:19.483361 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-dxcw5" Dec 05 17:53:19 crc kubenswrapper[4961]: I1205 17:53:19.486271 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-scripts" Dec 05 17:53:19 crc kubenswrapper[4961]: I1205 17:53:19.489455 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data" Dec 05 17:53:19 crc kubenswrapper[4961]: I1205 17:53:19.491830 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-dxcw5"] Dec 05 17:53:19 crc kubenswrapper[4961]: I1205 17:53:19.502075 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:53:19 crc kubenswrapper[4961]: I1205 17:53:19.595796 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02132a97-8a35-4fc1-80ae-fb221ab2b8c6-config-data\") pod \"nova-cell1-conductor-db-sync-dxcw5\" (UID: \"02132a97-8a35-4fc1-80ae-fb221ab2b8c6\") " pod="openstack/nova-cell1-conductor-db-sync-dxcw5" Dec 05 17:53:19 crc kubenswrapper[4961]: I1205 17:53:19.595873 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02132a97-8a35-4fc1-80ae-fb221ab2b8c6-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-dxcw5\" (UID: \"02132a97-8a35-4fc1-80ae-fb221ab2b8c6\") " pod="openstack/nova-cell1-conductor-db-sync-dxcw5" Dec 05 17:53:19 crc kubenswrapper[4961]: I1205 17:53:19.595943 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02132a97-8a35-4fc1-80ae-fb221ab2b8c6-scripts\") pod \"nova-cell1-conductor-db-sync-dxcw5\" (UID: \"02132a97-8a35-4fc1-80ae-fb221ab2b8c6\") " 
pod="openstack/nova-cell1-conductor-db-sync-dxcw5" Dec 05 17:53:19 crc kubenswrapper[4961]: I1205 17:53:19.596025 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-stq9s\" (UniqueName: \"kubernetes.io/projected/02132a97-8a35-4fc1-80ae-fb221ab2b8c6-kube-api-access-stq9s\") pod \"nova-cell1-conductor-db-sync-dxcw5\" (UID: \"02132a97-8a35-4fc1-80ae-fb221ab2b8c6\") " pod="openstack/nova-cell1-conductor-db-sync-dxcw5" Dec 05 17:53:19 crc kubenswrapper[4961]: I1205 17:53:19.697285 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-stq9s\" (UniqueName: \"kubernetes.io/projected/02132a97-8a35-4fc1-80ae-fb221ab2b8c6-kube-api-access-stq9s\") pod \"nova-cell1-conductor-db-sync-dxcw5\" (UID: \"02132a97-8a35-4fc1-80ae-fb221ab2b8c6\") " pod="openstack/nova-cell1-conductor-db-sync-dxcw5" Dec 05 17:53:19 crc kubenswrapper[4961]: I1205 17:53:19.697388 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02132a97-8a35-4fc1-80ae-fb221ab2b8c6-config-data\") pod \"nova-cell1-conductor-db-sync-dxcw5\" (UID: \"02132a97-8a35-4fc1-80ae-fb221ab2b8c6\") " pod="openstack/nova-cell1-conductor-db-sync-dxcw5" Dec 05 17:53:19 crc kubenswrapper[4961]: I1205 17:53:19.697427 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02132a97-8a35-4fc1-80ae-fb221ab2b8c6-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-dxcw5\" (UID: \"02132a97-8a35-4fc1-80ae-fb221ab2b8c6\") " pod="openstack/nova-cell1-conductor-db-sync-dxcw5" Dec 05 17:53:19 crc kubenswrapper[4961]: I1205 17:53:19.697474 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02132a97-8a35-4fc1-80ae-fb221ab2b8c6-scripts\") pod \"nova-cell1-conductor-db-sync-dxcw5\" (UID: \"02132a97-8a35-4fc1-80ae-fb221ab2b8c6\") " pod="openstack/nova-cell1-conductor-db-sync-dxcw5" Dec 05 17:53:19 crc kubenswrapper[4961]: I1205 17:53:19.705711 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02132a97-8a35-4fc1-80ae-fb221ab2b8c6-combined-ca-bundle\") pod \"nova-cell1-conductor-db-sync-dxcw5\" (UID: \"02132a97-8a35-4fc1-80ae-fb221ab2b8c6\") " pod="openstack/nova-cell1-conductor-db-sync-dxcw5" Dec 05 17:53:19 crc kubenswrapper[4961]: I1205 17:53:19.706027 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02132a97-8a35-4fc1-80ae-fb221ab2b8c6-scripts\") pod \"nova-cell1-conductor-db-sync-dxcw5\" (UID: \"02132a97-8a35-4fc1-80ae-fb221ab2b8c6\") " pod="openstack/nova-cell1-conductor-db-sync-dxcw5" Dec 05 17:53:19 crc kubenswrapper[4961]: I1205 17:53:19.706759 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02132a97-8a35-4fc1-80ae-fb221ab2b8c6-config-data\") pod \"nova-cell1-conductor-db-sync-dxcw5\" (UID: \"02132a97-8a35-4fc1-80ae-fb221ab2b8c6\") " pod="openstack/nova-cell1-conductor-db-sync-dxcw5" Dec 05 17:53:19 crc kubenswrapper[4961]: I1205 17:53:19.723574 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-stq9s\" (UniqueName: \"kubernetes.io/projected/02132a97-8a35-4fc1-80ae-fb221ab2b8c6-kube-api-access-stq9s\") pod \"nova-cell1-conductor-db-sync-dxcw5\" (UID: 
\"02132a97-8a35-4fc1-80ae-fb221ab2b8c6\") " pod="openstack/nova-cell1-conductor-db-sync-dxcw5" Dec 05 17:53:19 crc kubenswrapper[4961]: I1205 17:53:19.906280 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-wcz6s"] Dec 05 17:53:19 crc kubenswrapper[4961]: I1205 17:53:19.913879 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:53:19 crc kubenswrapper[4961]: I1205 17:53:19.933564 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-dxcw5" Dec 05 17:53:20 crc kubenswrapper[4961]: I1205 17:53:20.061128 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 17:53:20 crc kubenswrapper[4961]: I1205 17:53:20.266850 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0bfd30f6-3c39-4fb5-a918-439eb03161d5","Type":"ContainerStarted","Data":"f6801c1787cb500023523465719fcbb58a683f267ba5a850c5a2fa32e260bbb7"} Dec 05 17:53:20 crc kubenswrapper[4961]: I1205 17:53:20.269237 4961 generic.go:334] "Generic (PLEG): container finished" podID="6dfc20b2-940f-46c8-bcaa-e9e209f92bed" containerID="34d06768645ab6c9b378589d84f52d001f23974059838a58d9262de6040a8d6d" exitCode=0 Dec 05 17:53:20 crc kubenswrapper[4961]: I1205 17:53:20.269309 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" event={"ID":"6dfc20b2-940f-46c8-bcaa-e9e209f92bed","Type":"ContainerDied","Data":"34d06768645ab6c9b378589d84f52d001f23974059838a58d9262de6040a8d6d"} Dec 05 17:53:20 crc kubenswrapper[4961]: I1205 17:53:20.269336 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" event={"ID":"6dfc20b2-940f-46c8-bcaa-e9e209f92bed","Type":"ContainerStarted","Data":"46cfc46aacd5832b80510d97b46251ed0304f0d4ffb3bdd7d9ff668f529ccac1"} Dec 05 17:53:20 crc kubenswrapper[4961]: I1205 17:53:20.274866 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-8k9zj" event={"ID":"85bec0ac-6b6a-4532-a810-f468dded1d0e","Type":"ContainerStarted","Data":"ad19af129ed4b2a2d920494446107a71aafadc14cc4915299ed33300a7ebcf0e"} Dec 05 17:53:20 crc kubenswrapper[4961]: I1205 17:53:20.274944 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-8k9zj" event={"ID":"85bec0ac-6b6a-4532-a810-f468dded1d0e","Type":"ContainerStarted","Data":"189e65f879329b9c07dba217210d93b5ba8feba923535f27118df1f82f3ef8c4"} Dec 05 17:53:20 crc kubenswrapper[4961]: I1205 17:53:20.281846 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"474ec1a8-a032-453c-aa79-d05a4cc04c0f","Type":"ContainerStarted","Data":"3acebaf711b5549d86e159c7963b0c501ea91ff152fe100a9d7ed03f058c0b90"} Dec 05 17:53:20 crc kubenswrapper[4961]: I1205 17:53:20.301939 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"618faf17-2f1c-408f-82bd-ddee38044c18","Type":"ContainerStarted","Data":"9e81f4e9ca53299b3fcbae94418567176cdbd5611872a76eb4a5d2b043ddf2d5"} Dec 05 17:53:20 crc kubenswrapper[4961]: I1205 17:53:20.306230 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"78e88132-9629-46fb-961f-87e507341bb7","Type":"ContainerStarted","Data":"95a38d2b1b9e823fd357935627b8622fb866432787414295efcc9c98acaa5690"} Dec 05 17:53:20 crc kubenswrapper[4961]: I1205 17:53:20.328412 4961 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell0-cell-mapping-8k9zj" podStartSLOduration=2.328386064 podStartE2EDuration="2.328386064s" podCreationTimestamp="2025-12-05 17:53:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:53:20.320096938 +0000 UTC m=+1206.381247411" watchObservedRunningTime="2025-12-05 17:53:20.328386064 +0000 UTC m=+1206.389536537" Dec 05 17:53:20 crc kubenswrapper[4961]: I1205 17:53:20.494296 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-dxcw5"] Dec 05 17:53:21 crc kubenswrapper[4961]: I1205 17:53:21.320016 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" event={"ID":"6dfc20b2-940f-46c8-bcaa-e9e209f92bed","Type":"ContainerStarted","Data":"d8fc8ca51d295bba0225ad203c4bd58dadcc4898088dc29267e35c25fd830d0f"} Dec 05 17:53:21 crc kubenswrapper[4961]: I1205 17:53:21.320946 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" Dec 05 17:53:21 crc kubenswrapper[4961]: I1205 17:53:21.328864 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-dxcw5" event={"ID":"02132a97-8a35-4fc1-80ae-fb221ab2b8c6","Type":"ContainerStarted","Data":"bfe9bba830ac76f72adc2e647d726c432709084db421691029eb237d2490dc53"} Dec 05 17:53:21 crc kubenswrapper[4961]: I1205 17:53:21.328918 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-dxcw5" event={"ID":"02132a97-8a35-4fc1-80ae-fb221ab2b8c6","Type":"ContainerStarted","Data":"8eb18bc57c35075ace8f1be7a16c2597a3b2b598ede316f994327fc5388a7e3f"} Dec 05 17:53:21 crc kubenswrapper[4961]: I1205 17:53:21.355458 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" podStartSLOduration=3.355439971 podStartE2EDuration="3.355439971s" podCreationTimestamp="2025-12-05 17:53:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:53:21.348074648 +0000 UTC m=+1207.409225141" watchObservedRunningTime="2025-12-05 17:53:21.355439971 +0000 UTC m=+1207.416590444" Dec 05 17:53:21 crc kubenswrapper[4961]: I1205 17:53:21.372802 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-db-sync-dxcw5" podStartSLOduration=2.372770781 podStartE2EDuration="2.372770781s" podCreationTimestamp="2025-12-05 17:53:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:53:21.36264892 +0000 UTC m=+1207.423799403" watchObservedRunningTime="2025-12-05 17:53:21.372770781 +0000 UTC m=+1207.433921254" Dec 05 17:53:22 crc kubenswrapper[4961]: I1205 17:53:22.612851 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:53:22 crc kubenswrapper[4961]: I1205 17:53:22.637518 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 17:53:23 crc kubenswrapper[4961]: I1205 17:53:23.354394 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"618faf17-2f1c-408f-82bd-ddee38044c18","Type":"ContainerStarted","Data":"37fbb1670e7e440266cc24d18630eb3ac82b1a4d17a396a4d65b51c0cb06a714"} Dec 05 17:53:23 crc kubenswrapper[4961]: I1205 17:53:23.354694 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"618faf17-2f1c-408f-82bd-ddee38044c18","Type":"ContainerStarted","Data":"242bc2becd14d5d2cfef747ca9e5e5a609f8fb957a6078021933e678501d5bb0"} Dec 05 17:53:23 crc kubenswrapper[4961]: I1205 17:53:23.356602 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"78e88132-9629-46fb-961f-87e507341bb7","Type":"ContainerStarted","Data":"5574aefa6c8975e426a7ded37b787f75c378b4b18604e02ab181dc310dff571b"} Dec 05 17:53:23 crc kubenswrapper[4961]: I1205 17:53:23.356648 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"78e88132-9629-46fb-961f-87e507341bb7","Type":"ContainerStarted","Data":"eb3727e62a573261e0d87c40fa60d6a3ff09daae6e4acdf48ba468745d4ef219"} Dec 05 17:53:23 crc kubenswrapper[4961]: I1205 17:53:23.356746 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="78e88132-9629-46fb-961f-87e507341bb7" containerName="nova-metadata-log" containerID="cri-o://eb3727e62a573261e0d87c40fa60d6a3ff09daae6e4acdf48ba468745d4ef219" gracePeriod=30 Dec 05 17:53:23 crc kubenswrapper[4961]: I1205 17:53:23.356797 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="78e88132-9629-46fb-961f-87e507341bb7" containerName="nova-metadata-metadata" containerID="cri-o://5574aefa6c8975e426a7ded37b787f75c378b4b18604e02ab181dc310dff571b" gracePeriod=30 Dec 05 17:53:23 crc kubenswrapper[4961]: I1205 17:53:23.362644 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0bfd30f6-3c39-4fb5-a918-439eb03161d5","Type":"ContainerStarted","Data":"f3de41d770866c19d9359b77cba447d07b77bdec86e871a773dd78bfa22a4c8e"} Dec 05 17:53:23 crc kubenswrapper[4961]: I1205 17:53:23.366215 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"474ec1a8-a032-453c-aa79-d05a4cc04c0f","Type":"ContainerStarted","Data":"15c78af6a9ea119e8ed679f8b97411ee09b5836246d44935e47bb11d68b09fdc"} Dec 05 17:53:23 crc kubenswrapper[4961]: I1205 17:53:23.366561 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-cell1-novncproxy-0" podUID="474ec1a8-a032-453c-aa79-d05a4cc04c0f" containerName="nova-cell1-novncproxy-novncproxy" containerID="cri-o://15c78af6a9ea119e8ed679f8b97411ee09b5836246d44935e47bb11d68b09fdc" gracePeriod=30 Dec 05 17:53:23 crc kubenswrapper[4961]: I1205 17:53:23.384656 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.479918665 podStartE2EDuration="5.384633827s" podCreationTimestamp="2025-12-05 17:53:18 +0000 UTC" firstStartedPulling="2025-12-05 17:53:19.544983445 +0000 UTC m=+1205.606133918" lastFinishedPulling="2025-12-05 17:53:22.449698607 +0000 UTC m=+1208.510849080" observedRunningTime="2025-12-05 17:53:23.375919311 +0000 UTC m=+1209.437069794" watchObservedRunningTime="2025-12-05 17:53:23.384633827 +0000 UTC m=+1209.445784290" Dec 05 17:53:23 crc kubenswrapper[4961]: I1205 17:53:23.404352 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" 
podStartSLOduration=2.872638294 podStartE2EDuration="5.404330376s" podCreationTimestamp="2025-12-05 17:53:18 +0000 UTC" firstStartedPulling="2025-12-05 17:53:19.917883062 +0000 UTC m=+1205.979033535" lastFinishedPulling="2025-12-05 17:53:22.449575144 +0000 UTC m=+1208.510725617" observedRunningTime="2025-12-05 17:53:23.393836356 +0000 UTC m=+1209.454986849" watchObservedRunningTime="2025-12-05 17:53:23.404330376 +0000 UTC m=+1209.465480849" Dec 05 17:53:23 crc kubenswrapper[4961]: I1205 17:53:23.413275 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.514077352 podStartE2EDuration="5.413254448s" podCreationTimestamp="2025-12-05 17:53:18 +0000 UTC" firstStartedPulling="2025-12-05 17:53:19.550411239 +0000 UTC m=+1205.611561702" lastFinishedPulling="2025-12-05 17:53:22.449588325 +0000 UTC m=+1208.510738798" observedRunningTime="2025-12-05 17:53:23.409345671 +0000 UTC m=+1209.470496154" watchObservedRunningTime="2025-12-05 17:53:23.413254448 +0000 UTC m=+1209.474404921" Dec 05 17:53:23 crc kubenswrapper[4961]: I1205 17:53:23.434464 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=3.07625849 podStartE2EDuration="5.434443914s" podCreationTimestamp="2025-12-05 17:53:18 +0000 UTC" firstStartedPulling="2025-12-05 17:53:20.093865381 +0000 UTC m=+1206.155015854" lastFinishedPulling="2025-12-05 17:53:22.452050795 +0000 UTC m=+1208.513201278" observedRunningTime="2025-12-05 17:53:23.426351573 +0000 UTC m=+1209.487502076" watchObservedRunningTime="2025-12-05 17:53:23.434443914 +0000 UTC m=+1209.495594387" Dec 05 17:53:23 crc kubenswrapper[4961]: I1205 17:53:23.707252 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 17:53:23 crc kubenswrapper[4961]: I1205 17:53:23.992745 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 17:53:23 crc kubenswrapper[4961]: I1205 17:53:23.993090 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.009308 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.039948 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.095206 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qt97z\" (UniqueName: \"kubernetes.io/projected/78e88132-9629-46fb-961f-87e507341bb7-kube-api-access-qt97z\") pod \"78e88132-9629-46fb-961f-87e507341bb7\" (UID: \"78e88132-9629-46fb-961f-87e507341bb7\") " Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.095292 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78e88132-9629-46fb-961f-87e507341bb7-config-data\") pod \"78e88132-9629-46fb-961f-87e507341bb7\" (UID: \"78e88132-9629-46fb-961f-87e507341bb7\") " Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.095631 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/78e88132-9629-46fb-961f-87e507341bb7-logs\") pod \"78e88132-9629-46fb-961f-87e507341bb7\" (UID: \"78e88132-9629-46fb-961f-87e507341bb7\") " Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.095718 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78e88132-9629-46fb-961f-87e507341bb7-combined-ca-bundle\") pod \"78e88132-9629-46fb-961f-87e507341bb7\" (UID: \"78e88132-9629-46fb-961f-87e507341bb7\") " Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.312766 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78e88132-9629-46fb-961f-87e507341bb7-logs" (OuterVolumeSpecName: "logs") pod "78e88132-9629-46fb-961f-87e507341bb7" (UID: "78e88132-9629-46fb-961f-87e507341bb7"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.314223 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78e88132-9629-46fb-961f-87e507341bb7-kube-api-access-qt97z" (OuterVolumeSpecName: "kube-api-access-qt97z") pod "78e88132-9629-46fb-961f-87e507341bb7" (UID: "78e88132-9629-46fb-961f-87e507341bb7"). InnerVolumeSpecName "kube-api-access-qt97z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.318629 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78e88132-9629-46fb-961f-87e507341bb7-config-data" (OuterVolumeSpecName: "config-data") pod "78e88132-9629-46fb-961f-87e507341bb7" (UID: "78e88132-9629-46fb-961f-87e507341bb7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.318891 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/78e88132-9629-46fb-961f-87e507341bb7-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "78e88132-9629-46fb-961f-87e507341bb7" (UID: "78e88132-9629-46fb-961f-87e507341bb7"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.381500 4961 generic.go:334] "Generic (PLEG): container finished" podID="78e88132-9629-46fb-961f-87e507341bb7" containerID="5574aefa6c8975e426a7ded37b787f75c378b4b18604e02ab181dc310dff571b" exitCode=0 Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.381540 4961 generic.go:334] "Generic (PLEG): container finished" podID="78e88132-9629-46fb-961f-87e507341bb7" containerID="eb3727e62a573261e0d87c40fa60d6a3ff09daae6e4acdf48ba468745d4ef219" exitCode=143 Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.381586 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.381597 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"78e88132-9629-46fb-961f-87e507341bb7","Type":"ContainerDied","Data":"5574aefa6c8975e426a7ded37b787f75c378b4b18604e02ab181dc310dff571b"} Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.381640 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"78e88132-9629-46fb-961f-87e507341bb7","Type":"ContainerDied","Data":"eb3727e62a573261e0d87c40fa60d6a3ff09daae6e4acdf48ba468745d4ef219"} Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.381682 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"78e88132-9629-46fb-961f-87e507341bb7","Type":"ContainerDied","Data":"95a38d2b1b9e823fd357935627b8622fb866432787414295efcc9c98acaa5690"} Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.381704 4961 scope.go:117] "RemoveContainer" containerID="5574aefa6c8975e426a7ded37b787f75c378b4b18604e02ab181dc310dff571b" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.401982 4961 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/78e88132-9629-46fb-961f-87e507341bb7-logs\") on node \"crc\" DevicePath \"\"" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.402160 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/78e88132-9629-46fb-961f-87e507341bb7-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.402219 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qt97z\" (UniqueName: \"kubernetes.io/projected/78e88132-9629-46fb-961f-87e507341bb7-kube-api-access-qt97z\") on node \"crc\" DevicePath \"\"" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.402300 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/78e88132-9629-46fb-961f-87e507341bb7-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.428608 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.443293 4961 scope.go:117] "RemoveContainer" containerID="eb3727e62a573261e0d87c40fa60d6a3ff09daae6e4acdf48ba468745d4ef219" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.443622 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.458330 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:53:24 crc kubenswrapper[4961]: 
E1205 17:53:24.460422 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78e88132-9629-46fb-961f-87e507341bb7" containerName="nova-metadata-metadata" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.460450 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="78e88132-9629-46fb-961f-87e507341bb7" containerName="nova-metadata-metadata" Dec 05 17:53:24 crc kubenswrapper[4961]: E1205 17:53:24.460491 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78e88132-9629-46fb-961f-87e507341bb7" containerName="nova-metadata-log" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.460499 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="78e88132-9629-46fb-961f-87e507341bb7" containerName="nova-metadata-log" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.460751 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="78e88132-9629-46fb-961f-87e507341bb7" containerName="nova-metadata-log" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.460789 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="78e88132-9629-46fb-961f-87e507341bb7" containerName="nova-metadata-metadata" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.462081 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.464804 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.464989 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.475588 4961 scope.go:117] "RemoveContainer" containerID="5574aefa6c8975e426a7ded37b787f75c378b4b18604e02ab181dc310dff571b" Dec 05 17:53:24 crc kubenswrapper[4961]: E1205 17:53:24.475989 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5574aefa6c8975e426a7ded37b787f75c378b4b18604e02ab181dc310dff571b\": container with ID starting with 5574aefa6c8975e426a7ded37b787f75c378b4b18604e02ab181dc310dff571b not found: ID does not exist" containerID="5574aefa6c8975e426a7ded37b787f75c378b4b18604e02ab181dc310dff571b" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.476015 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5574aefa6c8975e426a7ded37b787f75c378b4b18604e02ab181dc310dff571b"} err="failed to get container status \"5574aefa6c8975e426a7ded37b787f75c378b4b18604e02ab181dc310dff571b\": rpc error: code = NotFound desc = could not find container \"5574aefa6c8975e426a7ded37b787f75c378b4b18604e02ab181dc310dff571b\": container with ID starting with 5574aefa6c8975e426a7ded37b787f75c378b4b18604e02ab181dc310dff571b not found: ID does not exist" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.476034 4961 scope.go:117] "RemoveContainer" containerID="eb3727e62a573261e0d87c40fa60d6a3ff09daae6e4acdf48ba468745d4ef219" Dec 05 17:53:24 crc kubenswrapper[4961]: E1205 17:53:24.476264 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb3727e62a573261e0d87c40fa60d6a3ff09daae6e4acdf48ba468745d4ef219\": container with ID starting with eb3727e62a573261e0d87c40fa60d6a3ff09daae6e4acdf48ba468745d4ef219 not found: ID does not exist" 
containerID="eb3727e62a573261e0d87c40fa60d6a3ff09daae6e4acdf48ba468745d4ef219" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.476289 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb3727e62a573261e0d87c40fa60d6a3ff09daae6e4acdf48ba468745d4ef219"} err="failed to get container status \"eb3727e62a573261e0d87c40fa60d6a3ff09daae6e4acdf48ba468745d4ef219\": rpc error: code = NotFound desc = could not find container \"eb3727e62a573261e0d87c40fa60d6a3ff09daae6e4acdf48ba468745d4ef219\": container with ID starting with eb3727e62a573261e0d87c40fa60d6a3ff09daae6e4acdf48ba468745d4ef219 not found: ID does not exist" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.476302 4961 scope.go:117] "RemoveContainer" containerID="5574aefa6c8975e426a7ded37b787f75c378b4b18604e02ab181dc310dff571b" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.476465 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5574aefa6c8975e426a7ded37b787f75c378b4b18604e02ab181dc310dff571b"} err="failed to get container status \"5574aefa6c8975e426a7ded37b787f75c378b4b18604e02ab181dc310dff571b\": rpc error: code = NotFound desc = could not find container \"5574aefa6c8975e426a7ded37b787f75c378b4b18604e02ab181dc310dff571b\": container with ID starting with 5574aefa6c8975e426a7ded37b787f75c378b4b18604e02ab181dc310dff571b not found: ID does not exist" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.476486 4961 scope.go:117] "RemoveContainer" containerID="eb3727e62a573261e0d87c40fa60d6a3ff09daae6e4acdf48ba468745d4ef219" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.476636 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb3727e62a573261e0d87c40fa60d6a3ff09daae6e4acdf48ba468745d4ef219"} err="failed to get container status \"eb3727e62a573261e0d87c40fa60d6a3ff09daae6e4acdf48ba468745d4ef219\": rpc error: code = NotFound desc = could not find container \"eb3727e62a573261e0d87c40fa60d6a3ff09daae6e4acdf48ba468745d4ef219\": container with ID starting with eb3727e62a573261e0d87c40fa60d6a3ff09daae6e4acdf48ba468745d4ef219 not found: ID does not exist" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.485237 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.606947 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xcv8b\" (UniqueName: \"kubernetes.io/projected/6970b0ab-c215-4a93-9927-603b9aaea264-kube-api-access-xcv8b\") pod \"nova-metadata-0\" (UID: \"6970b0ab-c215-4a93-9927-603b9aaea264\") " pod="openstack/nova-metadata-0" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.607015 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6970b0ab-c215-4a93-9927-603b9aaea264-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6970b0ab-c215-4a93-9927-603b9aaea264\") " pod="openstack/nova-metadata-0" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.607138 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6970b0ab-c215-4a93-9927-603b9aaea264-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6970b0ab-c215-4a93-9927-603b9aaea264\") " pod="openstack/nova-metadata-0" Dec 05 
17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.607176 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6970b0ab-c215-4a93-9927-603b9aaea264-config-data\") pod \"nova-metadata-0\" (UID: \"6970b0ab-c215-4a93-9927-603b9aaea264\") " pod="openstack/nova-metadata-0" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.607199 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6970b0ab-c215-4a93-9927-603b9aaea264-logs\") pod \"nova-metadata-0\" (UID: \"6970b0ab-c215-4a93-9927-603b9aaea264\") " pod="openstack/nova-metadata-0" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.708819 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xcv8b\" (UniqueName: \"kubernetes.io/projected/6970b0ab-c215-4a93-9927-603b9aaea264-kube-api-access-xcv8b\") pod \"nova-metadata-0\" (UID: \"6970b0ab-c215-4a93-9927-603b9aaea264\") " pod="openstack/nova-metadata-0" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.709102 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6970b0ab-c215-4a93-9927-603b9aaea264-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6970b0ab-c215-4a93-9927-603b9aaea264\") " pod="openstack/nova-metadata-0" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.709209 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6970b0ab-c215-4a93-9927-603b9aaea264-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6970b0ab-c215-4a93-9927-603b9aaea264\") " pod="openstack/nova-metadata-0" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.709242 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6970b0ab-c215-4a93-9927-603b9aaea264-config-data\") pod \"nova-metadata-0\" (UID: \"6970b0ab-c215-4a93-9927-603b9aaea264\") " pod="openstack/nova-metadata-0" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.709262 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6970b0ab-c215-4a93-9927-603b9aaea264-logs\") pod \"nova-metadata-0\" (UID: \"6970b0ab-c215-4a93-9927-603b9aaea264\") " pod="openstack/nova-metadata-0" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.709765 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6970b0ab-c215-4a93-9927-603b9aaea264-logs\") pod \"nova-metadata-0\" (UID: \"6970b0ab-c215-4a93-9927-603b9aaea264\") " pod="openstack/nova-metadata-0" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.714945 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6970b0ab-c215-4a93-9927-603b9aaea264-config-data\") pod \"nova-metadata-0\" (UID: \"6970b0ab-c215-4a93-9927-603b9aaea264\") " pod="openstack/nova-metadata-0" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.715993 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6970b0ab-c215-4a93-9927-603b9aaea264-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"6970b0ab-c215-4a93-9927-603b9aaea264\") 
" pod="openstack/nova-metadata-0" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.718457 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6970b0ab-c215-4a93-9927-603b9aaea264-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"6970b0ab-c215-4a93-9927-603b9aaea264\") " pod="openstack/nova-metadata-0" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.726235 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xcv8b\" (UniqueName: \"kubernetes.io/projected/6970b0ab-c215-4a93-9927-603b9aaea264-kube-api-access-xcv8b\") pod \"nova-metadata-0\" (UID: \"6970b0ab-c215-4a93-9927-603b9aaea264\") " pod="openstack/nova-metadata-0" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.820504 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 17:53:24 crc kubenswrapper[4961]: I1205 17:53:24.878801 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78e88132-9629-46fb-961f-87e507341bb7" path="/var/lib/kubelet/pods/78e88132-9629-46fb-961f-87e507341bb7/volumes" Dec 05 17:53:25 crc kubenswrapper[4961]: I1205 17:53:25.306287 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:53:25 crc kubenswrapper[4961]: I1205 17:53:25.390794 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6970b0ab-c215-4a93-9927-603b9aaea264","Type":"ContainerStarted","Data":"ad47125fc25af5d80345e48306ecf99b9d500df8963e951e5ac8ebcaadc8a45f"} Dec 05 17:53:26 crc kubenswrapper[4961]: I1205 17:53:26.405960 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6970b0ab-c215-4a93-9927-603b9aaea264","Type":"ContainerStarted","Data":"32bb58666e5faf57fd931cf127b8090f9e74863d14f045d52f809dca891b6634"} Dec 05 17:53:26 crc kubenswrapper[4961]: I1205 17:53:26.406245 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6970b0ab-c215-4a93-9927-603b9aaea264","Type":"ContainerStarted","Data":"8b13a8dbbe914b726d8412660240c2cff52659a85f66332c1dbebb7321969a0d"} Dec 05 17:53:26 crc kubenswrapper[4961]: I1205 17:53:26.435295 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.435273372 podStartE2EDuration="2.435273372s" podCreationTimestamp="2025-12-05 17:53:24 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:53:26.434044322 +0000 UTC m=+1212.495194835" watchObservedRunningTime="2025-12-05 17:53:26.435273372 +0000 UTC m=+1212.496423855" Dec 05 17:53:27 crc kubenswrapper[4961]: I1205 17:53:27.245660 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:53:27 crc kubenswrapper[4961]: I1205 17:53:27.245750 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
Dec 05 17:53:27 crc kubenswrapper[4961]: I1205 17:53:27.245834 4961 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vc27"
Dec 05 17:53:27 crc kubenswrapper[4961]: I1205 17:53:27.246676 4961 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ea0a333281fac885bbddc52760bdc853687db225a39907ca95a10b1226157c7e"} pod="openshift-machine-config-operator/machine-config-daemon-4vc27" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 17:53:27 crc kubenswrapper[4961]: I1205 17:53:27.246728 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" containerID="cri-o://ea0a333281fac885bbddc52760bdc853687db225a39907ca95a10b1226157c7e" gracePeriod=600
Dec 05 17:53:27 crc kubenswrapper[4961]: I1205 17:53:27.418870 4961 generic.go:334] "Generic (PLEG): container finished" podID="c048c267-061b-479b-9d63-b3aee093d9f6" containerID="ea0a333281fac885bbddc52760bdc853687db225a39907ca95a10b1226157c7e" exitCode=0
Dec 05 17:53:27 crc kubenswrapper[4961]: I1205 17:53:27.418995 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerDied","Data":"ea0a333281fac885bbddc52760bdc853687db225a39907ca95a10b1226157c7e"}
Dec 05 17:53:27 crc kubenswrapper[4961]: I1205 17:53:27.419238 4961 scope.go:117] "RemoveContainer" containerID="7d9ed5247bcd400783c6d7e2c5bad6038c840849b5210eed14fdd0422b90e593"
Dec 05 17:53:28 crc kubenswrapper[4961]: I1205 17:53:28.434103 4961 generic.go:334] "Generic (PLEG): container finished" podID="85bec0ac-6b6a-4532-a810-f468dded1d0e" containerID="ad19af129ed4b2a2d920494446107a71aafadc14cc4915299ed33300a7ebcf0e" exitCode=0
Dec 05 17:53:28 crc kubenswrapper[4961]: I1205 17:53:28.434223 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-8k9zj" event={"ID":"85bec0ac-6b6a-4532-a810-f468dded1d0e","Type":"ContainerDied","Data":"ad19af129ed4b2a2d920494446107a71aafadc14cc4915299ed33300a7ebcf0e"}
Dec 05 17:53:28 crc kubenswrapper[4961]: I1205 17:53:28.438090 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerStarted","Data":"511b0e3c93cece387d56804a522b75bb0ef062e17cb5f9c42dba313a290e70af"}
Dec 05 17:53:28 crc kubenswrapper[4961]: I1205 17:53:28.691238 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 05 17:53:28 crc kubenswrapper[4961]: I1205 17:53:28.691295 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 05 17:53:28 crc kubenswrapper[4961]: I1205 17:53:28.707283 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Dec 05 17:53:28 crc kubenswrapper[4961]: I1205 17:53:28.734645 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.020207 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-865f5d856f-wcz6s"
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.127807 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-r59rl"]
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.128198 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl" podUID="76453a2a-3b0c-40e3-8a89-f47adb86c0b0" containerName="dnsmasq-dns" containerID="cri-o://075befaa3821d8d3d16b09903923a00b8c7d3e1447c486fc67a827debff4174d" gracePeriod=10
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.472250 4961 generic.go:334] "Generic (PLEG): container finished" podID="76453a2a-3b0c-40e3-8a89-f47adb86c0b0" containerID="075befaa3821d8d3d16b09903923a00b8c7d3e1447c486fc67a827debff4174d" exitCode=0
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.475099 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl" event={"ID":"76453a2a-3b0c-40e3-8a89-f47adb86c0b0","Type":"ContainerDied","Data":"075befaa3821d8d3d16b09903923a00b8c7d3e1447c486fc67a827debff4174d"}
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.523022 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.643610 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl"
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.738427 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-dns-svc\") pod \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\" (UID: \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\") "
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.738814 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-ovsdbserver-sb\") pod \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\" (UID: \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\") "
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.739005 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-dns-swift-storage-0\") pod \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\" (UID: \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\") "
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.739037 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7kd8g\" (UniqueName: \"kubernetes.io/projected/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-kube-api-access-7kd8g\") pod \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\" (UID: \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\") "
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.739109 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-config\") pod \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\" (UID: \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\") "
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.739166 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-ovsdbserver-nb\") pod \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\" (UID: \"76453a2a-3b0c-40e3-8a89-f47adb86c0b0\") "
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.763191 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-kube-api-access-7kd8g" (OuterVolumeSpecName: "kube-api-access-7kd8g") pod "76453a2a-3b0c-40e3-8a89-f47adb86c0b0" (UID: "76453a2a-3b0c-40e3-8a89-f47adb86c0b0"). InnerVolumeSpecName "kube-api-access-7kd8g". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.780141 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="618faf17-2f1c-408f-82bd-ddee38044c18" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.184:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.780598 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="618faf17-2f1c-408f-82bd-ddee38044c18" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.184:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)"
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.821748 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.822911 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.823163 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "76453a2a-3b0c-40e3-8a89-f47adb86c0b0" (UID: "76453a2a-3b0c-40e3-8a89-f47adb86c0b0"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.836060 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-config" (OuterVolumeSpecName: "config") pod "76453a2a-3b0c-40e3-8a89-f47adb86c0b0" (UID: "76453a2a-3b0c-40e3-8a89-f47adb86c0b0"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.841835 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7kd8g\" (UniqueName: \"kubernetes.io/projected/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-kube-api-access-7kd8g\") on node \"crc\" DevicePath \"\""
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.841867 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-config\") on node \"crc\" DevicePath \"\""
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.841878 4961 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.851506 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-8k9zj"
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.854452 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "76453a2a-3b0c-40e3-8a89-f47adb86c0b0" (UID: "76453a2a-3b0c-40e3-8a89-f47adb86c0b0"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.858808 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "76453a2a-3b0c-40e3-8a89-f47adb86c0b0" (UID: "76453a2a-3b0c-40e3-8a89-f47adb86c0b0"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.861403 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "76453a2a-3b0c-40e3-8a89-f47adb86c0b0" (UID: "76453a2a-3b0c-40e3-8a89-f47adb86c0b0"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.943335 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85bec0ac-6b6a-4532-a810-f468dded1d0e-combined-ca-bundle\") pod \"85bec0ac-6b6a-4532-a810-f468dded1d0e\" (UID: \"85bec0ac-6b6a-4532-a810-f468dded1d0e\") "
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.943649 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bdwwd\" (UniqueName: \"kubernetes.io/projected/85bec0ac-6b6a-4532-a810-f468dded1d0e-kube-api-access-bdwwd\") pod \"85bec0ac-6b6a-4532-a810-f468dded1d0e\" (UID: \"85bec0ac-6b6a-4532-a810-f468dded1d0e\") "
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.943800 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85bec0ac-6b6a-4532-a810-f468dded1d0e-config-data\") pod \"85bec0ac-6b6a-4532-a810-f468dded1d0e\" (UID: \"85bec0ac-6b6a-4532-a810-f468dded1d0e\") "
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.943939 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85bec0ac-6b6a-4532-a810-f468dded1d0e-scripts\") pod \"85bec0ac-6b6a-4532-a810-f468dded1d0e\" (UID: \"85bec0ac-6b6a-4532-a810-f468dded1d0e\") "
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.945004 4961 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.945107 4961 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.945167 4961 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/76453a2a-3b0c-40e3-8a89-f47adb86c0b0-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.947841 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85bec0ac-6b6a-4532-a810-f468dded1d0e-scripts" (OuterVolumeSpecName: "scripts") pod "85bec0ac-6b6a-4532-a810-f468dded1d0e" (UID: "85bec0ac-6b6a-4532-a810-f468dded1d0e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.949142 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85bec0ac-6b6a-4532-a810-f468dded1d0e-kube-api-access-bdwwd" (OuterVolumeSpecName: "kube-api-access-bdwwd") pod "85bec0ac-6b6a-4532-a810-f468dded1d0e" (UID: "85bec0ac-6b6a-4532-a810-f468dded1d0e"). InnerVolumeSpecName "kube-api-access-bdwwd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.978886 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85bec0ac-6b6a-4532-a810-f468dded1d0e-config-data" (OuterVolumeSpecName: "config-data") pod "85bec0ac-6b6a-4532-a810-f468dded1d0e" (UID: "85bec0ac-6b6a-4532-a810-f468dded1d0e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:53:29 crc kubenswrapper[4961]: I1205 17:53:29.984954 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85bec0ac-6b6a-4532-a810-f468dded1d0e-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "85bec0ac-6b6a-4532-a810-f468dded1d0e" (UID: "85bec0ac-6b6a-4532-a810-f468dded1d0e"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:53:30 crc kubenswrapper[4961]: I1205 17:53:30.047753 4961 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85bec0ac-6b6a-4532-a810-f468dded1d0e-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 17:53:30 crc kubenswrapper[4961]: I1205 17:53:30.047816 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/85bec0ac-6b6a-4532-a810-f468dded1d0e-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 17:53:30 crc kubenswrapper[4961]: I1205 17:53:30.047833 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bdwwd\" (UniqueName: \"kubernetes.io/projected/85bec0ac-6b6a-4532-a810-f468dded1d0e-kube-api-access-bdwwd\") on node \"crc\" DevicePath \"\""
Dec 05 17:53:30 crc kubenswrapper[4961]: I1205 17:53:30.047845 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85bec0ac-6b6a-4532-a810-f468dded1d0e-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 17:53:30 crc kubenswrapper[4961]: I1205 17:53:30.447672 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Dec 05 17:53:30 crc kubenswrapper[4961]: I1205 17:53:30.506697 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell0-cell-mapping-8k9zj"
Dec 05 17:53:30 crc kubenswrapper[4961]: I1205 17:53:30.506833 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell0-cell-mapping-8k9zj" event={"ID":"85bec0ac-6b6a-4532-a810-f468dded1d0e","Type":"ContainerDied","Data":"189e65f879329b9c07dba217210d93b5ba8feba923535f27118df1f82f3ef8c4"}
Dec 05 17:53:30 crc kubenswrapper[4961]: I1205 17:53:30.508131 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="189e65f879329b9c07dba217210d93b5ba8feba923535f27118df1f82f3ef8c4"
Dec 05 17:53:30 crc kubenswrapper[4961]: I1205 17:53:30.510996 4961 generic.go:334] "Generic (PLEG): container finished" podID="02132a97-8a35-4fc1-80ae-fb221ab2b8c6" containerID="bfe9bba830ac76f72adc2e647d726c432709084db421691029eb237d2490dc53" exitCode=0
Dec 05 17:53:30 crc kubenswrapper[4961]: I1205 17:53:30.511094 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-dxcw5" event={"ID":"02132a97-8a35-4fc1-80ae-fb221ab2b8c6","Type":"ContainerDied","Data":"bfe9bba830ac76f72adc2e647d726c432709084db421691029eb237d2490dc53"}
Dec 05 17:53:30 crc kubenswrapper[4961]: I1205 17:53:30.515943 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl" event={"ID":"76453a2a-3b0c-40e3-8a89-f47adb86c0b0","Type":"ContainerDied","Data":"e37cdc1f6cf706a3b9605acc9f6519d9da4f90bef2e42f90b5947f1100f07ac6"}
Dec 05 17:53:30 crc kubenswrapper[4961]: I1205 17:53:30.516104 4961 scope.go:117] "RemoveContainer" containerID="075befaa3821d8d3d16b09903923a00b8c7d3e1447c486fc67a827debff4174d"
Dec 05 17:53:30 crc kubenswrapper[4961]: I1205 17:53:30.516292 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-6bb4fc677f-r59rl"
Dec 05 17:53:30 crc kubenswrapper[4961]: I1205 17:53:30.558239 4961 scope.go:117] "RemoveContainer" containerID="fbad90c1bdc6ee4426529eabf4d9e67ae68e8e86a55e7940b4750bad9325b84a"
Dec 05 17:53:30 crc kubenswrapper[4961]: I1205 17:53:30.586452 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-r59rl"]
Dec 05 17:53:30 crc kubenswrapper[4961]: I1205 17:53:30.604097 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-6bb4fc677f-r59rl"]
Dec 05 17:53:30 crc kubenswrapper[4961]: I1205 17:53:30.729859 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 17:53:30 crc kubenswrapper[4961]: I1205 17:53:30.741836 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 05 17:53:30 crc kubenswrapper[4961]: I1205 17:53:30.742102 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="618faf17-2f1c-408f-82bd-ddee38044c18" containerName="nova-api-log" containerID="cri-o://242bc2becd14d5d2cfef747ca9e5e5a609f8fb957a6078021933e678501d5bb0" gracePeriod=30
Dec 05 17:53:30 crc kubenswrapper[4961]: I1205 17:53:30.742251 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="618faf17-2f1c-408f-82bd-ddee38044c18" containerName="nova-api-api" containerID="cri-o://37fbb1670e7e440266cc24d18630eb3ac82b1a4d17a396a4d65b51c0cb06a714" gracePeriod=30
Dec 05 17:53:30 crc kubenswrapper[4961]: I1205 17:53:30.806031 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 17:53:30 crc kubenswrapper[4961]: I1205 17:53:30.878395 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="76453a2a-3b0c-40e3-8a89-f47adb86c0b0" path="/var/lib/kubelet/pods/76453a2a-3b0c-40e3-8a89-f47adb86c0b0/volumes"
Dec 05 17:53:31 crc kubenswrapper[4961]: I1205 17:53:31.527013 4961 generic.go:334] "Generic (PLEG): container finished" podID="618faf17-2f1c-408f-82bd-ddee38044c18" containerID="242bc2becd14d5d2cfef747ca9e5e5a609f8fb957a6078021933e678501d5bb0" exitCode=143
Dec 05 17:53:31 crc kubenswrapper[4961]: I1205 17:53:31.527136 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"618faf17-2f1c-408f-82bd-ddee38044c18","Type":"ContainerDied","Data":"242bc2becd14d5d2cfef747ca9e5e5a609f8fb957a6078021933e678501d5bb0"}
Dec 05 17:53:31 crc kubenswrapper[4961]: I1205 17:53:31.527303 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6970b0ab-c215-4a93-9927-603b9aaea264" containerName="nova-metadata-log" containerID="cri-o://8b13a8dbbe914b726d8412660240c2cff52659a85f66332c1dbebb7321969a0d" gracePeriod=30
Dec 05 17:53:31 crc kubenswrapper[4961]: I1205 17:53:31.527363 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="6970b0ab-c215-4a93-9927-603b9aaea264" containerName="nova-metadata-metadata" containerID="cri-o://32bb58666e5faf57fd931cf127b8090f9e74863d14f045d52f809dca891b6634" gracePeriod=30
Dec 05 17:53:31 crc kubenswrapper[4961]: I1205 17:53:31.527796 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="0bfd30f6-3c39-4fb5-a918-439eb03161d5" containerName="nova-scheduler-scheduler" containerID="cri-o://f3de41d770866c19d9359b77cba447d07b77bdec86e871a773dd78bfa22a4c8e" gracePeriod=30
Dec 05 17:53:31 crc kubenswrapper[4961]: I1205 17:53:31.921470 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-dxcw5"
Dec 05 17:53:31 crc kubenswrapper[4961]: I1205 17:53:31.993105 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02132a97-8a35-4fc1-80ae-fb221ab2b8c6-scripts\") pod \"02132a97-8a35-4fc1-80ae-fb221ab2b8c6\" (UID: \"02132a97-8a35-4fc1-80ae-fb221ab2b8c6\") "
Dec 05 17:53:31 crc kubenswrapper[4961]: I1205 17:53:31.993170 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02132a97-8a35-4fc1-80ae-fb221ab2b8c6-combined-ca-bundle\") pod \"02132a97-8a35-4fc1-80ae-fb221ab2b8c6\" (UID: \"02132a97-8a35-4fc1-80ae-fb221ab2b8c6\") "
Dec 05 17:53:31 crc kubenswrapper[4961]: I1205 17:53:31.993284 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02132a97-8a35-4fc1-80ae-fb221ab2b8c6-config-data\") pod \"02132a97-8a35-4fc1-80ae-fb221ab2b8c6\" (UID: \"02132a97-8a35-4fc1-80ae-fb221ab2b8c6\") "
Dec 05 17:53:31 crc kubenswrapper[4961]: I1205 17:53:31.993846 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-stq9s\" (UniqueName: \"kubernetes.io/projected/02132a97-8a35-4fc1-80ae-fb221ab2b8c6-kube-api-access-stq9s\") pod \"02132a97-8a35-4fc1-80ae-fb221ab2b8c6\" (UID: \"02132a97-8a35-4fc1-80ae-fb221ab2b8c6\") "
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.005081 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02132a97-8a35-4fc1-80ae-fb221ab2b8c6-scripts" (OuterVolumeSpecName: "scripts") pod "02132a97-8a35-4fc1-80ae-fb221ab2b8c6" (UID: "02132a97-8a35-4fc1-80ae-fb221ab2b8c6"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.006853 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/02132a97-8a35-4fc1-80ae-fb221ab2b8c6-kube-api-access-stq9s" (OuterVolumeSpecName: "kube-api-access-stq9s") pod "02132a97-8a35-4fc1-80ae-fb221ab2b8c6" (UID: "02132a97-8a35-4fc1-80ae-fb221ab2b8c6"). InnerVolumeSpecName "kube-api-access-stq9s". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.041038 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02132a97-8a35-4fc1-80ae-fb221ab2b8c6-config-data" (OuterVolumeSpecName: "config-data") pod "02132a97-8a35-4fc1-80ae-fb221ab2b8c6" (UID: "02132a97-8a35-4fc1-80ae-fb221ab2b8c6"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.043242 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/02132a97-8a35-4fc1-80ae-fb221ab2b8c6-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "02132a97-8a35-4fc1-80ae-fb221ab2b8c6" (UID: "02132a97-8a35-4fc1-80ae-fb221ab2b8c6"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.065083 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.096665 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/02132a97-8a35-4fc1-80ae-fb221ab2b8c6-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.096701 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-stq9s\" (UniqueName: \"kubernetes.io/projected/02132a97-8a35-4fc1-80ae-fb221ab2b8c6-kube-api-access-stq9s\") on node \"crc\" DevicePath \"\""
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.096711 4961 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/02132a97-8a35-4fc1-80ae-fb221ab2b8c6-scripts\") on node \"crc\" DevicePath \"\""
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.096719 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/02132a97-8a35-4fc1-80ae-fb221ab2b8c6-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.197418 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6970b0ab-c215-4a93-9927-603b9aaea264-config-data\") pod \"6970b0ab-c215-4a93-9927-603b9aaea264\" (UID: \"6970b0ab-c215-4a93-9927-603b9aaea264\") "
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.197473 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6970b0ab-c215-4a93-9927-603b9aaea264-combined-ca-bundle\") pod \"6970b0ab-c215-4a93-9927-603b9aaea264\" (UID: \"6970b0ab-c215-4a93-9927-603b9aaea264\") "
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.197541 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6970b0ab-c215-4a93-9927-603b9aaea264-logs\") pod \"6970b0ab-c215-4a93-9927-603b9aaea264\" (UID: \"6970b0ab-c215-4a93-9927-603b9aaea264\") "
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.197972 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6970b0ab-c215-4a93-9927-603b9aaea264-logs" (OuterVolumeSpecName: "logs") pod "6970b0ab-c215-4a93-9927-603b9aaea264" (UID: "6970b0ab-c215-4a93-9927-603b9aaea264"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.198176 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6970b0ab-c215-4a93-9927-603b9aaea264-nova-metadata-tls-certs\") pod \"6970b0ab-c215-4a93-9927-603b9aaea264\" (UID: \"6970b0ab-c215-4a93-9927-603b9aaea264\") "
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.198268 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcv8b\" (UniqueName: \"kubernetes.io/projected/6970b0ab-c215-4a93-9927-603b9aaea264-kube-api-access-xcv8b\") pod \"6970b0ab-c215-4a93-9927-603b9aaea264\" (UID: \"6970b0ab-c215-4a93-9927-603b9aaea264\") "
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.198676 4961 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/6970b0ab-c215-4a93-9927-603b9aaea264-logs\") on node \"crc\" DevicePath \"\""
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.202158 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6970b0ab-c215-4a93-9927-603b9aaea264-kube-api-access-xcv8b" (OuterVolumeSpecName: "kube-api-access-xcv8b") pod "6970b0ab-c215-4a93-9927-603b9aaea264" (UID: "6970b0ab-c215-4a93-9927-603b9aaea264"). InnerVolumeSpecName "kube-api-access-xcv8b". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.228159 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6970b0ab-c215-4a93-9927-603b9aaea264-config-data" (OuterVolumeSpecName: "config-data") pod "6970b0ab-c215-4a93-9927-603b9aaea264" (UID: "6970b0ab-c215-4a93-9927-603b9aaea264"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.233124 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6970b0ab-c215-4a93-9927-603b9aaea264-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "6970b0ab-c215-4a93-9927-603b9aaea264" (UID: "6970b0ab-c215-4a93-9927-603b9aaea264"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.255380 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6970b0ab-c215-4a93-9927-603b9aaea264-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "6970b0ab-c215-4a93-9927-603b9aaea264" (UID: "6970b0ab-c215-4a93-9927-603b9aaea264"). InnerVolumeSpecName "nova-metadata-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.299922 4961 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/6970b0ab-c215-4a93-9927-603b9aaea264-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.299957 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcv8b\" (UniqueName: \"kubernetes.io/projected/6970b0ab-c215-4a93-9927-603b9aaea264-kube-api-access-xcv8b\") on node \"crc\" DevicePath \"\""
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.299967 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6970b0ab-c215-4a93-9927-603b9aaea264-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.299976 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/6970b0ab-c215-4a93-9927-603b9aaea264-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.537504 4961 generic.go:334] "Generic (PLEG): container finished" podID="6970b0ab-c215-4a93-9927-603b9aaea264" containerID="32bb58666e5faf57fd931cf127b8090f9e74863d14f045d52f809dca891b6634" exitCode=0
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.537538 4961 generic.go:334] "Generic (PLEG): container finished" podID="6970b0ab-c215-4a93-9927-603b9aaea264" containerID="8b13a8dbbe914b726d8412660240c2cff52659a85f66332c1dbebb7321969a0d" exitCode=143
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.537569 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.537564 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6970b0ab-c215-4a93-9927-603b9aaea264","Type":"ContainerDied","Data":"32bb58666e5faf57fd931cf127b8090f9e74863d14f045d52f809dca891b6634"}
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.537630 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6970b0ab-c215-4a93-9927-603b9aaea264","Type":"ContainerDied","Data":"8b13a8dbbe914b726d8412660240c2cff52659a85f66332c1dbebb7321969a0d"}
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.537647 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"6970b0ab-c215-4a93-9927-603b9aaea264","Type":"ContainerDied","Data":"ad47125fc25af5d80345e48306ecf99b9d500df8963e951e5ac8ebcaadc8a45f"}
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.537666 4961 scope.go:117] "RemoveContainer" containerID="32bb58666e5faf57fd931cf127b8090f9e74863d14f045d52f809dca891b6634"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.539365 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-db-sync-dxcw5" event={"ID":"02132a97-8a35-4fc1-80ae-fb221ab2b8c6","Type":"ContainerDied","Data":"8eb18bc57c35075ace8f1be7a16c2597a3b2b598ede316f994327fc5388a7e3f"}
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.539400 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8eb18bc57c35075ace8f1be7a16c2597a3b2b598ede316f994327fc5388a7e3f"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.539455 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-db-sync-dxcw5"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.568396 4961 scope.go:117] "RemoveContainer" containerID="8b13a8dbbe914b726d8412660240c2cff52659a85f66332c1dbebb7321969a0d"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.605630 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.628460 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.630794 4961 scope.go:117] "RemoveContainer" containerID="32bb58666e5faf57fd931cf127b8090f9e74863d14f045d52f809dca891b6634"
Dec 05 17:53:32 crc kubenswrapper[4961]: E1205 17:53:32.631187 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"32bb58666e5faf57fd931cf127b8090f9e74863d14f045d52f809dca891b6634\": container with ID starting with 32bb58666e5faf57fd931cf127b8090f9e74863d14f045d52f809dca891b6634 not found: ID does not exist" containerID="32bb58666e5faf57fd931cf127b8090f9e74863d14f045d52f809dca891b6634"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.631216 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32bb58666e5faf57fd931cf127b8090f9e74863d14f045d52f809dca891b6634"} err="failed to get container status \"32bb58666e5faf57fd931cf127b8090f9e74863d14f045d52f809dca891b6634\": rpc error: code = NotFound desc = could not find container \"32bb58666e5faf57fd931cf127b8090f9e74863d14f045d52f809dca891b6634\": container with ID starting with 32bb58666e5faf57fd931cf127b8090f9e74863d14f045d52f809dca891b6634 not found: ID does not exist"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.631236 4961 scope.go:117] "RemoveContainer" containerID="8b13a8dbbe914b726d8412660240c2cff52659a85f66332c1dbebb7321969a0d"
Dec 05 17:53:32 crc kubenswrapper[4961]: E1205 17:53:32.631432 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b13a8dbbe914b726d8412660240c2cff52659a85f66332c1dbebb7321969a0d\": container with ID starting with 8b13a8dbbe914b726d8412660240c2cff52659a85f66332c1dbebb7321969a0d not found: ID does not exist" containerID="8b13a8dbbe914b726d8412660240c2cff52659a85f66332c1dbebb7321969a0d"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.631453 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b13a8dbbe914b726d8412660240c2cff52659a85f66332c1dbebb7321969a0d"} err="failed to get container status \"8b13a8dbbe914b726d8412660240c2cff52659a85f66332c1dbebb7321969a0d\": rpc error: code = NotFound desc = could not find container \"8b13a8dbbe914b726d8412660240c2cff52659a85f66332c1dbebb7321969a0d\": container with ID starting with 8b13a8dbbe914b726d8412660240c2cff52659a85f66332c1dbebb7321969a0d not found: ID does not exist"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.631467 4961 scope.go:117] "RemoveContainer" containerID="32bb58666e5faf57fd931cf127b8090f9e74863d14f045d52f809dca891b6634"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.631837 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"32bb58666e5faf57fd931cf127b8090f9e74863d14f045d52f809dca891b6634"} err="failed to get container status \"32bb58666e5faf57fd931cf127b8090f9e74863d14f045d52f809dca891b6634\": rpc error: code = NotFound desc = could not find container \"32bb58666e5faf57fd931cf127b8090f9e74863d14f045d52f809dca891b6634\": container with ID starting with 32bb58666e5faf57fd931cf127b8090f9e74863d14f045d52f809dca891b6634 not found: ID does not exist"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.631858 4961 scope.go:117] "RemoveContainer" containerID="8b13a8dbbe914b726d8412660240c2cff52659a85f66332c1dbebb7321969a0d"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.634483 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b13a8dbbe914b726d8412660240c2cff52659a85f66332c1dbebb7321969a0d"} err="failed to get container status \"8b13a8dbbe914b726d8412660240c2cff52659a85f66332c1dbebb7321969a0d\": rpc error: code = NotFound desc = could not find container \"8b13a8dbbe914b726d8412660240c2cff52659a85f66332c1dbebb7321969a0d\": container with ID starting with 8b13a8dbbe914b726d8412660240c2cff52659a85f66332c1dbebb7321969a0d not found: ID does not exist"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.646096 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 17:53:32 crc kubenswrapper[4961]: E1205 17:53:32.646542 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76453a2a-3b0c-40e3-8a89-f47adb86c0b0" containerName="dnsmasq-dns"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.646563 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="76453a2a-3b0c-40e3-8a89-f47adb86c0b0" containerName="dnsmasq-dns"
Dec 05 17:53:32 crc kubenswrapper[4961]: E1205 17:53:32.646577 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85bec0ac-6b6a-4532-a810-f468dded1d0e" containerName="nova-manage"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.646583 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="85bec0ac-6b6a-4532-a810-f468dded1d0e" containerName="nova-manage"
Dec 05 17:53:32 crc kubenswrapper[4961]: E1205 17:53:32.646593 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6970b0ab-c215-4a93-9927-603b9aaea264" containerName="nova-metadata-metadata"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.646599 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="6970b0ab-c215-4a93-9927-603b9aaea264" containerName="nova-metadata-metadata"
Dec 05 17:53:32 crc kubenswrapper[4961]: E1205 17:53:32.646609 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6970b0ab-c215-4a93-9927-603b9aaea264" containerName="nova-metadata-log"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.646615 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="6970b0ab-c215-4a93-9927-603b9aaea264" containerName="nova-metadata-log"
Dec 05 17:53:32 crc kubenswrapper[4961]: E1205 17:53:32.646633 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="02132a97-8a35-4fc1-80ae-fb221ab2b8c6" containerName="nova-cell1-conductor-db-sync"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.646639 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="02132a97-8a35-4fc1-80ae-fb221ab2b8c6" containerName="nova-cell1-conductor-db-sync"
Dec 05 17:53:32 crc kubenswrapper[4961]: E1205 17:53:32.646649 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="76453a2a-3b0c-40e3-8a89-f47adb86c0b0" containerName="init"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.646655 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="76453a2a-3b0c-40e3-8a89-f47adb86c0b0" containerName="init"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.646846 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="6970b0ab-c215-4a93-9927-603b9aaea264" containerName="nova-metadata-log"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.646857 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="02132a97-8a35-4fc1-80ae-fb221ab2b8c6" containerName="nova-cell1-conductor-db-sync"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.646870 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="6970b0ab-c215-4a93-9927-603b9aaea264" containerName="nova-metadata-metadata"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.646889 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="76453a2a-3b0c-40e3-8a89-f47adb86c0b0" containerName="dnsmasq-dns"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.646898 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="85bec0ac-6b6a-4532-a810-f468dded1d0e" containerName="nova-manage"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.647865 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.650936 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.651216 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.662241 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.672169 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-conductor-0"]
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.673736 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.676466 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-conductor-config-data"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.684847 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.708637 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sdztl\" (UniqueName: \"kubernetes.io/projected/594e1bb5-120e-4b26-909d-b8913c43e670-kube-api-access-sdztl\") pod \"nova-metadata-0\" (UID: \"594e1bb5-120e-4b26-909d-b8913c43e670\") " pod="openstack/nova-metadata-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.708683 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/594e1bb5-120e-4b26-909d-b8913c43e670-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"594e1bb5-120e-4b26-909d-b8913c43e670\") " pod="openstack/nova-metadata-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.708716 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/594e1bb5-120e-4b26-909d-b8913c43e670-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"594e1bb5-120e-4b26-909d-b8913c43e670\") " pod="openstack/nova-metadata-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.708954 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/594e1bb5-120e-4b26-909d-b8913c43e670-logs\") pod \"nova-metadata-0\" (UID: \"594e1bb5-120e-4b26-909d-b8913c43e670\") " pod="openstack/nova-metadata-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.709172 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/594e1bb5-120e-4b26-909d-b8913c43e670-config-data\") pod \"nova-metadata-0\" (UID: \"594e1bb5-120e-4b26-909d-b8913c43e670\") " pod="openstack/nova-metadata-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.813404 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sdztl\" (UniqueName: \"kubernetes.io/projected/594e1bb5-120e-4b26-909d-b8913c43e670-kube-api-access-sdztl\") pod \"nova-metadata-0\" (UID: \"594e1bb5-120e-4b26-909d-b8913c43e670\") " pod="openstack/nova-metadata-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.813472 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/594e1bb5-120e-4b26-909d-b8913c43e670-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"594e1bb5-120e-4b26-909d-b8913c43e670\") " pod="openstack/nova-metadata-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.813502 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/594e1bb5-120e-4b26-909d-b8913c43e670-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"594e1bb5-120e-4b26-909d-b8913c43e670\") " pod="openstack/nova-metadata-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.813541 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e14fb01-680d-425f-a35b-c6346f47b86d-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"1e14fb01-680d-425f-a35b-c6346f47b86d\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.813617 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/594e1bb5-120e-4b26-909d-b8913c43e670-logs\") pod \"nova-metadata-0\" (UID: \"594e1bb5-120e-4b26-909d-b8913c43e670\") " pod="openstack/nova-metadata-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.813752 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qzqqp\" (UniqueName: \"kubernetes.io/projected/1e14fb01-680d-425f-a35b-c6346f47b86d-kube-api-access-qzqqp\") pod \"nova-cell1-conductor-0\" (UID: \"1e14fb01-680d-425f-a35b-c6346f47b86d\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.813849 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/594e1bb5-120e-4b26-909d-b8913c43e670-config-data\") pod \"nova-metadata-0\" (UID: \"594e1bb5-120e-4b26-909d-b8913c43e670\") " pod="openstack/nova-metadata-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.813892 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e14fb01-680d-425f-a35b-c6346f47b86d-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"1e14fb01-680d-425f-a35b-c6346f47b86d\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.814125 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/594e1bb5-120e-4b26-909d-b8913c43e670-logs\") pod \"nova-metadata-0\" (UID: \"594e1bb5-120e-4b26-909d-b8913c43e670\") " pod="openstack/nova-metadata-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.817832 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/594e1bb5-120e-4b26-909d-b8913c43e670-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"594e1bb5-120e-4b26-909d-b8913c43e670\") " pod="openstack/nova-metadata-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.818279 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/594e1bb5-120e-4b26-909d-b8913c43e670-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"594e1bb5-120e-4b26-909d-b8913c43e670\") " pod="openstack/nova-metadata-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.820922 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/594e1bb5-120e-4b26-909d-b8913c43e670-config-data\") pod \"nova-metadata-0\" (UID: \"594e1bb5-120e-4b26-909d-b8913c43e670\") " pod="openstack/nova-metadata-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.837016 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sdztl\" (UniqueName: \"kubernetes.io/projected/594e1bb5-120e-4b26-909d-b8913c43e670-kube-api-access-sdztl\") pod \"nova-metadata-0\" (UID: \"594e1bb5-120e-4b26-909d-b8913c43e670\") " pod="openstack/nova-metadata-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.875700 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6970b0ab-c215-4a93-9927-603b9aaea264" path="/var/lib/kubelet/pods/6970b0ab-c215-4a93-9927-603b9aaea264/volumes"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.914942 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e14fb01-680d-425f-a35b-c6346f47b86d-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"1e14fb01-680d-425f-a35b-c6346f47b86d\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.915107 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qzqqp\" (UniqueName: \"kubernetes.io/projected/1e14fb01-680d-425f-a35b-c6346f47b86d-kube-api-access-qzqqp\") pod \"nova-cell1-conductor-0\" (UID: \"1e14fb01-680d-425f-a35b-c6346f47b86d\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.915173 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e14fb01-680d-425f-a35b-c6346f47b86d-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"1e14fb01-680d-425f-a35b-c6346f47b86d\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.918515 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1e14fb01-680d-425f-a35b-c6346f47b86d-combined-ca-bundle\") pod \"nova-cell1-conductor-0\" (UID: \"1e14fb01-680d-425f-a35b-c6346f47b86d\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.921938 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1e14fb01-680d-425f-a35b-c6346f47b86d-config-data\") pod \"nova-cell1-conductor-0\" (UID: \"1e14fb01-680d-425f-a35b-c6346f47b86d\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 17:53:32 crc kubenswrapper[4961]: I1205 17:53:32.937885 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qzqqp\" (UniqueName: \"kubernetes.io/projected/1e14fb01-680d-425f-a35b-c6346f47b86d-kube-api-access-qzqqp\") pod \"nova-cell1-conductor-0\" (UID: \"1e14fb01-680d-425f-a35b-c6346f47b86d\") " pod="openstack/nova-cell1-conductor-0"
Dec 05 17:53:33 crc kubenswrapper[4961]: I1205 17:53:33.013432 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0"
Dec 05 17:53:33 crc kubenswrapper[4961]: I1205 17:53:33.040954 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-conductor-0"
Dec 05 17:53:33 crc kubenswrapper[4961]: W1205 17:53:33.474526 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod594e1bb5_120e_4b26_909d_b8913c43e670.slice/crio-823414a084f03dd059d3ecd3171a16486bf2e0abe87da9a07bce3154a197a84f WatchSource:0}: Error finding container 823414a084f03dd059d3ecd3171a16486bf2e0abe87da9a07bce3154a197a84f: Status 404 returned error can't find the container with id 823414a084f03dd059d3ecd3171a16486bf2e0abe87da9a07bce3154a197a84f
Dec 05 17:53:33 crc kubenswrapper[4961]: I1205 17:53:33.481175 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"]
Dec 05 17:53:33 crc kubenswrapper[4961]: I1205 17:53:33.557821 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"594e1bb5-120e-4b26-909d-b8913c43e670","Type":"ContainerStarted","Data":"823414a084f03dd059d3ecd3171a16486bf2e0abe87da9a07bce3154a197a84f"}
Dec 05 17:53:33 crc kubenswrapper[4961]: I1205 17:53:33.570678 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-conductor-0"]
Dec 05 17:53:33 crc kubenswrapper[4961]: W1205 17:53:33.607069 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1e14fb01_680d_425f_a35b_c6346f47b86d.slice/crio-8c6d48477dc9d9b2704e0a8df885d87adae3677650c700b5b54092c75793cb7f WatchSource:0}: Error finding container 8c6d48477dc9d9b2704e0a8df885d87adae3677650c700b5b54092c75793cb7f: Status 404 returned error can't find the container with id 8c6d48477dc9d9b2704e0a8df885d87adae3677650c700b5b54092c75793cb7f
Dec 05 17:53:33 crc kubenswrapper[4961]: E1205 17:53:33.709464 4961 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f3de41d770866c19d9359b77cba447d07b77bdec86e871a773dd78bfa22a4c8e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 05 17:53:33 crc kubenswrapper[4961]: E1205 17:53:33.711453 4961 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f3de41d770866c19d9359b77cba447d07b77bdec86e871a773dd78bfa22a4c8e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 05 17:53:33 crc kubenswrapper[4961]: E1205 17:53:33.712801 4961 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="f3de41d770866c19d9359b77cba447d07b77bdec86e871a773dd78bfa22a4c8e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"]
Dec 05 17:53:33 crc kubenswrapper[4961]: E1205 17:53:33.712836 4961 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="0bfd30f6-3c39-4fb5-a918-439eb03161d5" containerName="nova-scheduler-scheduler"
Dec 05 17:53:34 crc kubenswrapper[4961]: I1205 17:53:34.392925 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"]
Dec 05 17:53:34 crc kubenswrapper[4961]: I1205 17:53:34.393201 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/kube-state-metrics-0" podUID="5123c9de-dcdc-4189-8bad-330610afddd3" containerName="kube-state-metrics" containerID="cri-o://9b699dbd0951639780506a5a40563260fe0d7d437c24fb4c8baa72945380f33e" gracePeriod=30
Dec 05 17:53:34 crc kubenswrapper[4961]: I1205 17:53:34.579385 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"594e1bb5-120e-4b26-909d-b8913c43e670","Type":"ContainerStarted","Data":"7fdf1623579aa765dd33353a6378de5af3460e49ef2c8739d944a8c54cf0eca6"}
Dec 05 17:53:34 crc kubenswrapper[4961]: I1205 17:53:34.579716 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"594e1bb5-120e-4b26-909d-b8913c43e670","Type":"ContainerStarted","Data":"2b2930f167730a4952215ebcfd2bb7a763e61f40e6b66ca6890daf37cc598931"}
Dec 05 17:53:34 crc kubenswrapper[4961]: I1205 17:53:34.583182 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"1e14fb01-680d-425f-a35b-c6346f47b86d","Type":"ContainerStarted","Data":"0a853de7611336e23e25f678cfa45eef24ac005a6255b3b816266314ad2e1469"}
Dec 05 17:53:34 crc kubenswrapper[4961]: I1205 17:53:34.583222 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-conductor-0" event={"ID":"1e14fb01-680d-425f-a35b-c6346f47b86d","Type":"ContainerStarted","Data":"8c6d48477dc9d9b2704e0a8df885d87adae3677650c700b5b54092c75793cb7f"}
Dec 05 17:53:34 crc kubenswrapper[4961]: I1205 17:53:34.583454 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-conductor-0"
Dec 05 17:53:34 crc kubenswrapper[4961]: I1205 17:53:34.589364 4961 generic.go:334] "Generic (PLEG): container finished" podID="5123c9de-dcdc-4189-8bad-330610afddd3" containerID="9b699dbd0951639780506a5a40563260fe0d7d437c24fb4c8baa72945380f33e" exitCode=2
Dec 05 17:53:34 crc kubenswrapper[4961]: I1205 17:53:34.589392 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5123c9de-dcdc-4189-8bad-330610afddd3","Type":"ContainerDied","Data":"9b699dbd0951639780506a5a40563260fe0d7d437c24fb4c8baa72945380f33e"}
Dec 05 17:53:34 crc kubenswrapper[4961]: I1205 17:53:34.592735 4961 generic.go:334] "Generic (PLEG): container finished" podID="0bfd30f6-3c39-4fb5-a918-439eb03161d5" containerID="f3de41d770866c19d9359b77cba447d07b77bdec86e871a773dd78bfa22a4c8e" exitCode=0
Dec 05 17:53:34 crc kubenswrapper[4961]: I1205 17:53:34.592801 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0bfd30f6-3c39-4fb5-a918-439eb03161d5","Type":"ContainerDied","Data":"f3de41d770866c19d9359b77cba447d07b77bdec86e871a773dd78bfa22a4c8e"}
Dec 05 17:53:34 crc kubenswrapper[4961]: I1205 17:53:34.607547 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.607520126 podStartE2EDuration="2.607520126s" podCreationTimestamp="2025-12-05 17:53:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:53:34.600536583 +0000 UTC m=+1220.661687056" watchObservedRunningTime="2025-12-05 17:53:34.607520126 +0000 UTC m=+1220.668670599"
Dec 05 17:53:34 crc kubenswrapper[4961]: I1205 17:53:34.621368 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-conductor-0" podStartSLOduration=2.621346869 podStartE2EDuration="2.621346869s" podCreationTimestamp="2025-12-05 17:53:32 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:53:34.616235462 +0000 UTC m=+1220.677385955" watchObservedRunningTime="2025-12-05 17:53:34.621346869 +0000 UTC m=+1220.682497332"
Dec 05 17:53:34 crc kubenswrapper[4961]: I1205 17:53:34.880697 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0"
Dec 05 17:53:34 crc kubenswrapper[4961]: I1205 17:53:34.978554 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7jfzd\" (UniqueName: \"kubernetes.io/projected/5123c9de-dcdc-4189-8bad-330610afddd3-kube-api-access-7jfzd\") pod \"5123c9de-dcdc-4189-8bad-330610afddd3\" (UID: \"5123c9de-dcdc-4189-8bad-330610afddd3\") "
Dec 05 17:53:34 crc kubenswrapper[4961]: I1205 17:53:34.986013 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5123c9de-dcdc-4189-8bad-330610afddd3-kube-api-access-7jfzd" (OuterVolumeSpecName: "kube-api-access-7jfzd") pod "5123c9de-dcdc-4189-8bad-330610afddd3" (UID: "5123c9de-dcdc-4189-8bad-330610afddd3"). InnerVolumeSpecName "kube-api-access-7jfzd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.078325 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.083300 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7jfzd\" (UniqueName: \"kubernetes.io/projected/5123c9de-dcdc-4189-8bad-330610afddd3-kube-api-access-7jfzd\") on node \"crc\" DevicePath \"\""
Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.184248 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bfd30f6-3c39-4fb5-a918-439eb03161d5-combined-ca-bundle\") pod \"0bfd30f6-3c39-4fb5-a918-439eb03161d5\" (UID: \"0bfd30f6-3c39-4fb5-a918-439eb03161d5\") "
Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.184309 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4tsdl\" (UniqueName: \"kubernetes.io/projected/0bfd30f6-3c39-4fb5-a918-439eb03161d5-kube-api-access-4tsdl\") pod \"0bfd30f6-3c39-4fb5-a918-439eb03161d5\" (UID: \"0bfd30f6-3c39-4fb5-a918-439eb03161d5\") "
Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.184376 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0bfd30f6-3c39-4fb5-a918-439eb03161d5-config-data\") pod \"0bfd30f6-3c39-4fb5-a918-439eb03161d5\" (UID: \"0bfd30f6-3c39-4fb5-a918-439eb03161d5\") "
Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.188510 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0bfd30f6-3c39-4fb5-a918-439eb03161d5-kube-api-access-4tsdl" (OuterVolumeSpecName: "kube-api-access-4tsdl") pod "0bfd30f6-3c39-4fb5-a918-439eb03161d5" (UID: "0bfd30f6-3c39-4fb5-a918-439eb03161d5"). InnerVolumeSpecName "kube-api-access-4tsdl".
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.212631 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bfd30f6-3c39-4fb5-a918-439eb03161d5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "0bfd30f6-3c39-4fb5-a918-439eb03161d5" (UID: "0bfd30f6-3c39-4fb5-a918-439eb03161d5"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.212666 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0bfd30f6-3c39-4fb5-a918-439eb03161d5-config-data" (OuterVolumeSpecName: "config-data") pod "0bfd30f6-3c39-4fb5-a918-439eb03161d5" (UID: "0bfd30f6-3c39-4fb5-a918-439eb03161d5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.286416 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/0bfd30f6-3c39-4fb5-a918-439eb03161d5-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.286450 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4tsdl\" (UniqueName: \"kubernetes.io/projected/0bfd30f6-3c39-4fb5-a918-439eb03161d5-kube-api-access-4tsdl\") on node \"crc\" DevicePath \"\"" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.286465 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/0bfd30f6-3c39-4fb5-a918-439eb03161d5-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.561440 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.612829 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"5123c9de-dcdc-4189-8bad-330610afddd3","Type":"ContainerDied","Data":"d48eff7bd430a8d53a0ade02089eab30f6398f879b082e1a458caf261f4fda7b"} Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.612878 4961 scope.go:117] "RemoveContainer" containerID="9b699dbd0951639780506a5a40563260fe0d7d437c24fb4c8baa72945380f33e" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.612988 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.616888 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.617219 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"0bfd30f6-3c39-4fb5-a918-439eb03161d5","Type":"ContainerDied","Data":"f6801c1787cb500023523465719fcbb58a683f267ba5a850c5a2fa32e260bbb7"} Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.622625 4961 generic.go:334] "Generic (PLEG): container finished" podID="618faf17-2f1c-408f-82bd-ddee38044c18" containerID="37fbb1670e7e440266cc24d18630eb3ac82b1a4d17a396a4d65b51c0cb06a714" exitCode=0 Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.622719 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.622867 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"618faf17-2f1c-408f-82bd-ddee38044c18","Type":"ContainerDied","Data":"37fbb1670e7e440266cc24d18630eb3ac82b1a4d17a396a4d65b51c0cb06a714"} Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.622899 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"618faf17-2f1c-408f-82bd-ddee38044c18","Type":"ContainerDied","Data":"9e81f4e9ca53299b3fcbae94418567176cdbd5611872a76eb4a5d2b043ddf2d5"} Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.638915 4961 scope.go:117] "RemoveContainer" containerID="f3de41d770866c19d9359b77cba447d07b77bdec86e871a773dd78bfa22a4c8e" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.652910 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.681348 4961 scope.go:117] "RemoveContainer" containerID="37fbb1670e7e440266cc24d18630eb3ac82b1a4d17a396a4d65b51c0cb06a714" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.684411 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.695420 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/618faf17-2f1c-408f-82bd-ddee38044c18-config-data\") pod \"618faf17-2f1c-408f-82bd-ddee38044c18\" (UID: \"618faf17-2f1c-408f-82bd-ddee38044c18\") " Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.695484 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c47b8\" (UniqueName: \"kubernetes.io/projected/618faf17-2f1c-408f-82bd-ddee38044c18-kube-api-access-c47b8\") pod \"618faf17-2f1c-408f-82bd-ddee38044c18\" (UID: \"618faf17-2f1c-408f-82bd-ddee38044c18\") " Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.695576 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/618faf17-2f1c-408f-82bd-ddee38044c18-logs\") pod \"618faf17-2f1c-408f-82bd-ddee38044c18\" (UID: \"618faf17-2f1c-408f-82bd-ddee38044c18\") " Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.695697 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/618faf17-2f1c-408f-82bd-ddee38044c18-combined-ca-bundle\") pod \"618faf17-2f1c-408f-82bd-ddee38044c18\" (UID: \"618faf17-2f1c-408f-82bd-ddee38044c18\") " Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.720141 4961 scope.go:117] "RemoveContainer" containerID="242bc2becd14d5d2cfef747ca9e5e5a609f8fb957a6078021933e678501d5bb0" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.721958 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.726730 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.756160 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/618faf17-2f1c-408f-82bd-ddee38044c18-kube-api-access-c47b8" (OuterVolumeSpecName: "kube-api-access-c47b8") pod "618faf17-2f1c-408f-82bd-ddee38044c18" (UID: 
"618faf17-2f1c-408f-82bd-ddee38044c18"). InnerVolumeSpecName "kube-api-access-c47b8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.761715 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/618faf17-2f1c-408f-82bd-ddee38044c18-logs" (OuterVolumeSpecName: "logs") pod "618faf17-2f1c-408f-82bd-ddee38044c18" (UID: "618faf17-2f1c-408f-82bd-ddee38044c18"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.782973 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 17:53:35 crc kubenswrapper[4961]: E1205 17:53:35.784510 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="618faf17-2f1c-408f-82bd-ddee38044c18" containerName="nova-api-api" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.784530 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="618faf17-2f1c-408f-82bd-ddee38044c18" containerName="nova-api-api" Dec 05 17:53:35 crc kubenswrapper[4961]: E1205 17:53:35.784566 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0bfd30f6-3c39-4fb5-a918-439eb03161d5" containerName="nova-scheduler-scheduler" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.784572 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="0bfd30f6-3c39-4fb5-a918-439eb03161d5" containerName="nova-scheduler-scheduler" Dec 05 17:53:35 crc kubenswrapper[4961]: E1205 17:53:35.784580 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="618faf17-2f1c-408f-82bd-ddee38044c18" containerName="nova-api-log" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.784587 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="618faf17-2f1c-408f-82bd-ddee38044c18" containerName="nova-api-log" Dec 05 17:53:35 crc kubenswrapper[4961]: E1205 17:53:35.784602 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5123c9de-dcdc-4189-8bad-330610afddd3" containerName="kube-state-metrics" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.784608 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="5123c9de-dcdc-4189-8bad-330610afddd3" containerName="kube-state-metrics" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.785622 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="0bfd30f6-3c39-4fb5-a918-439eb03161d5" containerName="nova-scheduler-scheduler" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.785636 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="618faf17-2f1c-408f-82bd-ddee38044c18" containerName="nova-api-api" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.785661 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="5123c9de-dcdc-4189-8bad-330610afddd3" containerName="kube-state-metrics" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.785687 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="618faf17-2f1c-408f-82bd-ddee38044c18" containerName="nova-api-log" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.821158 4961 scope.go:117] "RemoveContainer" containerID="37fbb1670e7e440266cc24d18630eb3ac82b1a4d17a396a4d65b51c0cb06a714" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.822370 4961 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/618faf17-2f1c-408f-82bd-ddee38044c18-logs\") on node \"crc\" DevicePath \"\"" 
Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.822386 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c47b8\" (UniqueName: \"kubernetes.io/projected/618faf17-2f1c-408f-82bd-ddee38044c18-kube-api-access-c47b8\") on node \"crc\" DevicePath \"\"" Dec 05 17:53:35 crc kubenswrapper[4961]: E1205 17:53:35.824751 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37fbb1670e7e440266cc24d18630eb3ac82b1a4d17a396a4d65b51c0cb06a714\": container with ID starting with 37fbb1670e7e440266cc24d18630eb3ac82b1a4d17a396a4d65b51c0cb06a714 not found: ID does not exist" containerID="37fbb1670e7e440266cc24d18630eb3ac82b1a4d17a396a4d65b51c0cb06a714" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.824826 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37fbb1670e7e440266cc24d18630eb3ac82b1a4d17a396a4d65b51c0cb06a714"} err="failed to get container status \"37fbb1670e7e440266cc24d18630eb3ac82b1a4d17a396a4d65b51c0cb06a714\": rpc error: code = NotFound desc = could not find container \"37fbb1670e7e440266cc24d18630eb3ac82b1a4d17a396a4d65b51c0cb06a714\": container with ID starting with 37fbb1670e7e440266cc24d18630eb3ac82b1a4d17a396a4d65b51c0cb06a714 not found: ID does not exist" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.824854 4961 scope.go:117] "RemoveContainer" containerID="242bc2becd14d5d2cfef747ca9e5e5a609f8fb957a6078021933e678501d5bb0" Dec 05 17:53:35 crc kubenswrapper[4961]: E1205 17:53:35.826734 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"242bc2becd14d5d2cfef747ca9e5e5a609f8fb957a6078021933e678501d5bb0\": container with ID starting with 242bc2becd14d5d2cfef747ca9e5e5a609f8fb957a6078021933e678501d5bb0 not found: ID does not exist" containerID="242bc2becd14d5d2cfef747ca9e5e5a609f8fb957a6078021933e678501d5bb0" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.826770 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"242bc2becd14d5d2cfef747ca9e5e5a609f8fb957a6078021933e678501d5bb0"} err="failed to get container status \"242bc2becd14d5d2cfef747ca9e5e5a609f8fb957a6078021933e678501d5bb0\": rpc error: code = NotFound desc = could not find container \"242bc2becd14d5d2cfef747ca9e5e5a609f8fb957a6078021933e678501d5bb0\": container with ID starting with 242bc2becd14d5d2cfef747ca9e5e5a609f8fb957a6078021933e678501d5bb0 not found: ID does not exist" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.843285 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.843329 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.844690 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.845519 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.845978 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.857187 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"kube-state-metrics-tls-config" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.857473 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-kube-state-metrics-svc" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.857859 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.872785 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/618faf17-2f1c-408f-82bd-ddee38044c18-config-data" (OuterVolumeSpecName: "config-data") pod "618faf17-2f1c-408f-82bd-ddee38044c18" (UID: "618faf17-2f1c-408f-82bd-ddee38044c18"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.924055 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/e5e6ba28-82ee-411a-a9e1-46db404bdff6-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"e5e6ba28-82ee-411a-a9e1-46db404bdff6\") " pod="openstack/kube-state-metrics-0" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.924095 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c53690af-342f-487b-81f6-c9d9d03ba6c0-config-data\") pod \"nova-scheduler-0\" (UID: \"c53690af-342f-487b-81f6-c9d9d03ba6c0\") " pod="openstack/nova-scheduler-0" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.924131 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/e5e6ba28-82ee-411a-a9e1-46db404bdff6-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"e5e6ba28-82ee-411a-a9e1-46db404bdff6\") " pod="openstack/kube-state-metrics-0" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.924248 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d75hs\" (UniqueName: \"kubernetes.io/projected/c53690af-342f-487b-81f6-c9d9d03ba6c0-kube-api-access-d75hs\") pod \"nova-scheduler-0\" (UID: \"c53690af-342f-487b-81f6-c9d9d03ba6c0\") " pod="openstack/nova-scheduler-0" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.924352 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zksjw\" (UniqueName: \"kubernetes.io/projected/e5e6ba28-82ee-411a-a9e1-46db404bdff6-kube-api-access-zksjw\") pod \"kube-state-metrics-0\" (UID: \"e5e6ba28-82ee-411a-a9e1-46db404bdff6\") " pod="openstack/kube-state-metrics-0" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.924396 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5e6ba28-82ee-411a-a9e1-46db404bdff6-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"e5e6ba28-82ee-411a-a9e1-46db404bdff6\") " 
pod="openstack/kube-state-metrics-0" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.924421 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c53690af-342f-487b-81f6-c9d9d03ba6c0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c53690af-342f-487b-81f6-c9d9d03ba6c0\") " pod="openstack/nova-scheduler-0" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.924480 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/618faf17-2f1c-408f-82bd-ddee38044c18-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:53:35 crc kubenswrapper[4961]: I1205 17:53:35.942243 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/618faf17-2f1c-408f-82bd-ddee38044c18-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "618faf17-2f1c-408f-82bd-ddee38044c18" (UID: "618faf17-2f1c-408f-82bd-ddee38044c18"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.026638 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c53690af-342f-487b-81f6-c9d9d03ba6c0-config-data\") pod \"nova-scheduler-0\" (UID: \"c53690af-342f-487b-81f6-c9d9d03ba6c0\") " pod="openstack/nova-scheduler-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.026917 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/e5e6ba28-82ee-411a-a9e1-46db404bdff6-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"e5e6ba28-82ee-411a-a9e1-46db404bdff6\") " pod="openstack/kube-state-metrics-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.027021 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d75hs\" (UniqueName: \"kubernetes.io/projected/c53690af-342f-487b-81f6-c9d9d03ba6c0-kube-api-access-d75hs\") pod \"nova-scheduler-0\" (UID: \"c53690af-342f-487b-81f6-c9d9d03ba6c0\") " pod="openstack/nova-scheduler-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.027171 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zksjw\" (UniqueName: \"kubernetes.io/projected/e5e6ba28-82ee-411a-a9e1-46db404bdff6-kube-api-access-zksjw\") pod \"kube-state-metrics-0\" (UID: \"e5e6ba28-82ee-411a-a9e1-46db404bdff6\") " pod="openstack/kube-state-metrics-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.027284 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5e6ba28-82ee-411a-a9e1-46db404bdff6-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"e5e6ba28-82ee-411a-a9e1-46db404bdff6\") " pod="openstack/kube-state-metrics-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.027367 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c53690af-342f-487b-81f6-c9d9d03ba6c0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c53690af-342f-487b-81f6-c9d9d03ba6c0\") " pod="openstack/nova-scheduler-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.027533 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/e5e6ba28-82ee-411a-a9e1-46db404bdff6-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"e5e6ba28-82ee-411a-a9e1-46db404bdff6\") " pod="openstack/kube-state-metrics-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.027679 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/618faf17-2f1c-408f-82bd-ddee38044c18-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.030876 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-config\" (UniqueName: \"kubernetes.io/secret/e5e6ba28-82ee-411a-a9e1-46db404bdff6-kube-state-metrics-tls-config\") pod \"kube-state-metrics-0\" (UID: \"e5e6ba28-82ee-411a-a9e1-46db404bdff6\") " pod="openstack/kube-state-metrics-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.032639 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c53690af-342f-487b-81f6-c9d9d03ba6c0-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"c53690af-342f-487b-81f6-c9d9d03ba6c0\") " pod="openstack/nova-scheduler-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.033584 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c53690af-342f-487b-81f6-c9d9d03ba6c0-config-data\") pod \"nova-scheduler-0\" (UID: \"c53690af-342f-487b-81f6-c9d9d03ba6c0\") " pod="openstack/nova-scheduler-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.034391 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-state-metrics-tls-certs\" (UniqueName: \"kubernetes.io/secret/e5e6ba28-82ee-411a-a9e1-46db404bdff6-kube-state-metrics-tls-certs\") pod \"kube-state-metrics-0\" (UID: \"e5e6ba28-82ee-411a-a9e1-46db404bdff6\") " pod="openstack/kube-state-metrics-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.041512 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e5e6ba28-82ee-411a-a9e1-46db404bdff6-combined-ca-bundle\") pod \"kube-state-metrics-0\" (UID: \"e5e6ba28-82ee-411a-a9e1-46db404bdff6\") " pod="openstack/kube-state-metrics-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.044570 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zksjw\" (UniqueName: \"kubernetes.io/projected/e5e6ba28-82ee-411a-a9e1-46db404bdff6-kube-api-access-zksjw\") pod \"kube-state-metrics-0\" (UID: \"e5e6ba28-82ee-411a-a9e1-46db404bdff6\") " pod="openstack/kube-state-metrics-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.046349 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d75hs\" (UniqueName: \"kubernetes.io/projected/c53690af-342f-487b-81f6-c9d9d03ba6c0-kube-api-access-d75hs\") pod \"nova-scheduler-0\" (UID: \"c53690af-342f-487b-81f6-c9d9d03ba6c0\") " pod="openstack/nova-scheduler-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.216819 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/kube-state-metrics-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.272933 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.298874 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.313405 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.314029 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.315937 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.321902 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.324554 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.435304 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7a59662-9529-4704-89fd-1e74c46f7780-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c7a59662-9529-4704-89fd-1e74c46f7780\") " pod="openstack/nova-api-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.435398 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wtnbp\" (UniqueName: \"kubernetes.io/projected/c7a59662-9529-4704-89fd-1e74c46f7780-kube-api-access-wtnbp\") pod \"nova-api-0\" (UID: \"c7a59662-9529-4704-89fd-1e74c46f7780\") " pod="openstack/nova-api-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.435545 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7a59662-9529-4704-89fd-1e74c46f7780-config-data\") pod \"nova-api-0\" (UID: \"c7a59662-9529-4704-89fd-1e74c46f7780\") " pod="openstack/nova-api-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.435572 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7a59662-9529-4704-89fd-1e74c46f7780-logs\") pod \"nova-api-0\" (UID: \"c7a59662-9529-4704-89fd-1e74c46f7780\") " pod="openstack/nova-api-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.538837 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7a59662-9529-4704-89fd-1e74c46f7780-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c7a59662-9529-4704-89fd-1e74c46f7780\") " pod="openstack/nova-api-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.538897 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wtnbp\" (UniqueName: \"kubernetes.io/projected/c7a59662-9529-4704-89fd-1e74c46f7780-kube-api-access-wtnbp\") pod \"nova-api-0\" (UID: \"c7a59662-9529-4704-89fd-1e74c46f7780\") " pod="openstack/nova-api-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.539081 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/secret/c7a59662-9529-4704-89fd-1e74c46f7780-config-data\") pod \"nova-api-0\" (UID: \"c7a59662-9529-4704-89fd-1e74c46f7780\") " pod="openstack/nova-api-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.539105 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7a59662-9529-4704-89fd-1e74c46f7780-logs\") pod \"nova-api-0\" (UID: \"c7a59662-9529-4704-89fd-1e74c46f7780\") " pod="openstack/nova-api-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.539862 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7a59662-9529-4704-89fd-1e74c46f7780-logs\") pod \"nova-api-0\" (UID: \"c7a59662-9529-4704-89fd-1e74c46f7780\") " pod="openstack/nova-api-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.546343 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7a59662-9529-4704-89fd-1e74c46f7780-config-data\") pod \"nova-api-0\" (UID: \"c7a59662-9529-4704-89fd-1e74c46f7780\") " pod="openstack/nova-api-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.547914 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7a59662-9529-4704-89fd-1e74c46f7780-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"c7a59662-9529-4704-89fd-1e74c46f7780\") " pod="openstack/nova-api-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.558324 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wtnbp\" (UniqueName: \"kubernetes.io/projected/c7a59662-9529-4704-89fd-1e74c46f7780-kube-api-access-wtnbp\") pod \"nova-api-0\" (UID: \"c7a59662-9529-4704-89fd-1e74c46f7780\") " pod="openstack/nova-api-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.642962 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.731036 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/kube-state-metrics-0"] Dec 05 17:53:36 crc kubenswrapper[4961]: W1205 17:53:36.746687 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode5e6ba28_82ee_411a_a9e1_46db404bdff6.slice/crio-86d812072277324efcad31f00cb1f5439e24405a08d2ae9f6a040cecd8c586e0 WatchSource:0}: Error finding container 86d812072277324efcad31f00cb1f5439e24405a08d2ae9f6a040cecd8c586e0: Status 404 returned error can't find the container with id 86d812072277324efcad31f00cb1f5439e24405a08d2ae9f6a040cecd8c586e0 Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.750294 4961 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.841154 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 17:53:36 crc kubenswrapper[4961]: W1205 17:53:36.841921 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc53690af_342f_487b_81f6_c9d9d03ba6c0.slice/crio-937447da8e8ac2e3b3d723212ea0537e95da230b48ed5275686f21afdb506e85 WatchSource:0}: Error finding container 937447da8e8ac2e3b3d723212ea0537e95da230b48ed5275686f21afdb506e85: Status 404 returned error can't find the container with id 937447da8e8ac2e3b3d723212ea0537e95da230b48ed5275686f21afdb506e85 Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.876843 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0bfd30f6-3c39-4fb5-a918-439eb03161d5" path="/var/lib/kubelet/pods/0bfd30f6-3c39-4fb5-a918-439eb03161d5/volumes" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.877405 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5123c9de-dcdc-4189-8bad-330610afddd3" path="/var/lib/kubelet/pods/5123c9de-dcdc-4189-8bad-330610afddd3/volumes" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.879589 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="618faf17-2f1c-408f-82bd-ddee38044c18" path="/var/lib/kubelet/pods/618faf17-2f1c-408f-82bd-ddee38044c18/volumes" Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.920973 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.921321 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="40d6bdd6-246b-487a-abf4-5e44db189027" containerName="ceilometer-central-agent" containerID="cri-o://85615c8f9b33de72b8c1a8439d7660a9dce959f974ffa94a73e8d694f21a4d40" gracePeriod=30 Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.921462 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="40d6bdd6-246b-487a-abf4-5e44db189027" containerName="proxy-httpd" containerID="cri-o://f5713dc6ab50474397330b8581a605c5b32eb07f1e28a62c851e0f0d544c9837" gracePeriod=30 Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.921529 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="40d6bdd6-246b-487a-abf4-5e44db189027" containerName="ceilometer-notification-agent" containerID="cri-o://cca0ce3f6fe1dda59157fb358f3ed4e0427c0244cd431a08908adb017129ddec" 
gracePeriod=30 Dec 05 17:53:36 crc kubenswrapper[4961]: I1205 17:53:36.923441 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="40d6bdd6-246b-487a-abf4-5e44db189027" containerName="sg-core" containerID="cri-o://264cfac70a49319869b757b9c8eeb9b8e140c1411e568d2b0ca153334ae3e756" gracePeriod=30 Dec 05 17:53:37 crc kubenswrapper[4961]: I1205 17:53:37.118893 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:53:37 crc kubenswrapper[4961]: W1205 17:53:37.125737 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc7a59662_9529_4704_89fd_1e74c46f7780.slice/crio-e2f9c7a5ddc84bb266e06b22180ed2f6a700de2854728fe4ce96afc03d158a40 WatchSource:0}: Error finding container e2f9c7a5ddc84bb266e06b22180ed2f6a700de2854728fe4ce96afc03d158a40: Status 404 returned error can't find the container with id e2f9c7a5ddc84bb266e06b22180ed2f6a700de2854728fe4ce96afc03d158a40 Dec 05 17:53:37 crc kubenswrapper[4961]: I1205 17:53:37.650019 4961 generic.go:334] "Generic (PLEG): container finished" podID="40d6bdd6-246b-487a-abf4-5e44db189027" containerID="f5713dc6ab50474397330b8581a605c5b32eb07f1e28a62c851e0f0d544c9837" exitCode=0 Dec 05 17:53:37 crc kubenswrapper[4961]: I1205 17:53:37.650316 4961 generic.go:334] "Generic (PLEG): container finished" podID="40d6bdd6-246b-487a-abf4-5e44db189027" containerID="264cfac70a49319869b757b9c8eeb9b8e140c1411e568d2b0ca153334ae3e756" exitCode=2 Dec 05 17:53:37 crc kubenswrapper[4961]: I1205 17:53:37.650326 4961 generic.go:334] "Generic (PLEG): container finished" podID="40d6bdd6-246b-487a-abf4-5e44db189027" containerID="85615c8f9b33de72b8c1a8439d7660a9dce959f974ffa94a73e8d694f21a4d40" exitCode=0 Dec 05 17:53:37 crc kubenswrapper[4961]: I1205 17:53:37.650103 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40d6bdd6-246b-487a-abf4-5e44db189027","Type":"ContainerDied","Data":"f5713dc6ab50474397330b8581a605c5b32eb07f1e28a62c851e0f0d544c9837"} Dec 05 17:53:37 crc kubenswrapper[4961]: I1205 17:53:37.650482 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40d6bdd6-246b-487a-abf4-5e44db189027","Type":"ContainerDied","Data":"264cfac70a49319869b757b9c8eeb9b8e140c1411e568d2b0ca153334ae3e756"} Dec 05 17:53:37 crc kubenswrapper[4961]: I1205 17:53:37.650498 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40d6bdd6-246b-487a-abf4-5e44db189027","Type":"ContainerDied","Data":"85615c8f9b33de72b8c1a8439d7660a9dce959f974ffa94a73e8d694f21a4d40"} Dec 05 17:53:37 crc kubenswrapper[4961]: I1205 17:53:37.654553 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c53690af-342f-487b-81f6-c9d9d03ba6c0","Type":"ContainerStarted","Data":"1613e0fb2a4af24d550997ec52461e44f2aa6ace1c5a2355e2b5a79ba5229c9e"} Dec 05 17:53:37 crc kubenswrapper[4961]: I1205 17:53:37.654600 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c53690af-342f-487b-81f6-c9d9d03ba6c0","Type":"ContainerStarted","Data":"937447da8e8ac2e3b3d723212ea0537e95da230b48ed5275686f21afdb506e85"} Dec 05 17:53:37 crc kubenswrapper[4961]: I1205 17:53:37.656355 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"c7a59662-9529-4704-89fd-1e74c46f7780","Type":"ContainerStarted","Data":"d92c3d8ebc74d843efb71475e1f4313cbaac899381c6ad2eefc1750e828c839d"} Dec 05 17:53:37 crc kubenswrapper[4961]: I1205 17:53:37.656385 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c7a59662-9529-4704-89fd-1e74c46f7780","Type":"ContainerStarted","Data":"76c9038be3ea78ba1036aad76ce4088712198d4eefee685f3b33a0e9056d09bc"} Dec 05 17:53:37 crc kubenswrapper[4961]: I1205 17:53:37.656397 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c7a59662-9529-4704-89fd-1e74c46f7780","Type":"ContainerStarted","Data":"e2f9c7a5ddc84bb266e06b22180ed2f6a700de2854728fe4ce96afc03d158a40"} Dec 05 17:53:37 crc kubenswrapper[4961]: I1205 17:53:37.658887 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e5e6ba28-82ee-411a-a9e1-46db404bdff6","Type":"ContainerStarted","Data":"400a1021903b7b7073d726273dda46fb7c37f0173d8fe2a67a541a4b576a9517"} Dec 05 17:53:37 crc kubenswrapper[4961]: I1205 17:53:37.658922 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/kube-state-metrics-0" event={"ID":"e5e6ba28-82ee-411a-a9e1-46db404bdff6","Type":"ContainerStarted","Data":"86d812072277324efcad31f00cb1f5439e24405a08d2ae9f6a040cecd8c586e0"} Dec 05 17:53:37 crc kubenswrapper[4961]: I1205 17:53:37.659311 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/kube-state-metrics-0" Dec 05 17:53:37 crc kubenswrapper[4961]: I1205 17:53:37.671763 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.671746437 podStartE2EDuration="2.671746437s" podCreationTimestamp="2025-12-05 17:53:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:53:37.670068816 +0000 UTC m=+1223.731219309" watchObservedRunningTime="2025-12-05 17:53:37.671746437 +0000 UTC m=+1223.732896910" Dec 05 17:53:37 crc kubenswrapper[4961]: I1205 17:53:37.693990 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=1.693971229 podStartE2EDuration="1.693971229s" podCreationTimestamp="2025-12-05 17:53:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:53:37.691468017 +0000 UTC m=+1223.752618490" watchObservedRunningTime="2025-12-05 17:53:37.693971229 +0000 UTC m=+1223.755121702" Dec 05 17:53:37 crc kubenswrapper[4961]: I1205 17:53:37.714039 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/kube-state-metrics-0" podStartSLOduration=2.356856992 podStartE2EDuration="2.714014798s" podCreationTimestamp="2025-12-05 17:53:35 +0000 UTC" firstStartedPulling="2025-12-05 17:53:36.750036966 +0000 UTC m=+1222.811187439" lastFinishedPulling="2025-12-05 17:53:37.107194772 +0000 UTC m=+1223.168345245" observedRunningTime="2025-12-05 17:53:37.709200848 +0000 UTC m=+1223.770351331" watchObservedRunningTime="2025-12-05 17:53:37.714014798 +0000 UTC m=+1223.775165271" Dec 05 17:53:38 crc kubenswrapper[4961]: I1205 17:53:38.013632 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0" Dec 05 17:53:38 crc kubenswrapper[4961]: I1205 17:53:38.014038 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack/nova-metadata-0" Dec 05 17:53:38 crc kubenswrapper[4961]: I1205 17:53:38.072019 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-conductor-0" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.190828 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.299022 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40d6bdd6-246b-487a-abf4-5e44db189027-scripts\") pod \"40d6bdd6-246b-487a-abf4-5e44db189027\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.299103 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40d6bdd6-246b-487a-abf4-5e44db189027-config-data\") pod \"40d6bdd6-246b-487a-abf4-5e44db189027\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.299154 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-24flb\" (UniqueName: \"kubernetes.io/projected/40d6bdd6-246b-487a-abf4-5e44db189027-kube-api-access-24flb\") pod \"40d6bdd6-246b-487a-abf4-5e44db189027\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.299205 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40d6bdd6-246b-487a-abf4-5e44db189027-log-httpd\") pod \"40d6bdd6-246b-487a-abf4-5e44db189027\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.299218 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/40d6bdd6-246b-487a-abf4-5e44db189027-sg-core-conf-yaml\") pod \"40d6bdd6-246b-487a-abf4-5e44db189027\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.299247 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40d6bdd6-246b-487a-abf4-5e44db189027-run-httpd\") pod \"40d6bdd6-246b-487a-abf4-5e44db189027\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.299263 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40d6bdd6-246b-487a-abf4-5e44db189027-combined-ca-bundle\") pod \"40d6bdd6-246b-487a-abf4-5e44db189027\" (UID: \"40d6bdd6-246b-487a-abf4-5e44db189027\") " Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.300970 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40d6bdd6-246b-487a-abf4-5e44db189027-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "40d6bdd6-246b-487a-abf4-5e44db189027" (UID: "40d6bdd6-246b-487a-abf4-5e44db189027"). InnerVolumeSpecName "log-httpd". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.301132 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/40d6bdd6-246b-487a-abf4-5e44db189027-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "40d6bdd6-246b-487a-abf4-5e44db189027" (UID: "40d6bdd6-246b-487a-abf4-5e44db189027"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.306683 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40d6bdd6-246b-487a-abf4-5e44db189027-scripts" (OuterVolumeSpecName: "scripts") pod "40d6bdd6-246b-487a-abf4-5e44db189027" (UID: "40d6bdd6-246b-487a-abf4-5e44db189027"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.306688 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40d6bdd6-246b-487a-abf4-5e44db189027-kube-api-access-24flb" (OuterVolumeSpecName: "kube-api-access-24flb") pod "40d6bdd6-246b-487a-abf4-5e44db189027" (UID: "40d6bdd6-246b-487a-abf4-5e44db189027"). InnerVolumeSpecName "kube-api-access-24flb". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.339196 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40d6bdd6-246b-487a-abf4-5e44db189027-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "40d6bdd6-246b-487a-abf4-5e44db189027" (UID: "40d6bdd6-246b-487a-abf4-5e44db189027"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.374380 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40d6bdd6-246b-487a-abf4-5e44db189027-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "40d6bdd6-246b-487a-abf4-5e44db189027" (UID: "40d6bdd6-246b-487a-abf4-5e44db189027"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.401061 4961 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/40d6bdd6-246b-487a-abf4-5e44db189027-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.401106 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-24flb\" (UniqueName: \"kubernetes.io/projected/40d6bdd6-246b-487a-abf4-5e44db189027-kube-api-access-24flb\") on node \"crc\" DevicePath \"\"" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.401121 4961 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40d6bdd6-246b-487a-abf4-5e44db189027-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.401137 4961 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/40d6bdd6-246b-487a-abf4-5e44db189027-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.401148 4961 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/40d6bdd6-246b-487a-abf4-5e44db189027-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.401162 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/40d6bdd6-246b-487a-abf4-5e44db189027-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.403317 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40d6bdd6-246b-487a-abf4-5e44db189027-config-data" (OuterVolumeSpecName: "config-data") pod "40d6bdd6-246b-487a-abf4-5e44db189027" (UID: "40d6bdd6-246b-487a-abf4-5e44db189027"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.502332 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/40d6bdd6-246b-487a-abf4-5e44db189027-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.694050 4961 generic.go:334] "Generic (PLEG): container finished" podID="40d6bdd6-246b-487a-abf4-5e44db189027" containerID="cca0ce3f6fe1dda59157fb358f3ed4e0427c0244cd431a08908adb017129ddec" exitCode=0 Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.694098 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40d6bdd6-246b-487a-abf4-5e44db189027","Type":"ContainerDied","Data":"cca0ce3f6fe1dda59157fb358f3ed4e0427c0244cd431a08908adb017129ddec"} Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.694124 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"40d6bdd6-246b-487a-abf4-5e44db189027","Type":"ContainerDied","Data":"6558e03de38533ed6b09b03f1ba7710f172eb9929837deac172a8a62f4263a52"} Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.694142 4961 scope.go:117] "RemoveContainer" containerID="f5713dc6ab50474397330b8581a605c5b32eb07f1e28a62c851e0f0d544c9837" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.694263 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.735622 4961 scope.go:117] "RemoveContainer" containerID="264cfac70a49319869b757b9c8eeb9b8e140c1411e568d2b0ca153334ae3e756" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.737875 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.748157 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.761640 4961 scope.go:117] "RemoveContainer" containerID="cca0ce3f6fe1dda59157fb358f3ed4e0427c0244cd431a08908adb017129ddec" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.766195 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:53:39 crc kubenswrapper[4961]: E1205 17:53:39.766707 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40d6bdd6-246b-487a-abf4-5e44db189027" containerName="sg-core" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.766724 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="40d6bdd6-246b-487a-abf4-5e44db189027" containerName="sg-core" Dec 05 17:53:39 crc kubenswrapper[4961]: E1205 17:53:39.766747 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40d6bdd6-246b-487a-abf4-5e44db189027" containerName="proxy-httpd" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.766756 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="40d6bdd6-246b-487a-abf4-5e44db189027" containerName="proxy-httpd" Dec 05 17:53:39 crc kubenswrapper[4961]: E1205 17:53:39.766793 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40d6bdd6-246b-487a-abf4-5e44db189027" containerName="ceilometer-central-agent" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.766801 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="40d6bdd6-246b-487a-abf4-5e44db189027" containerName="ceilometer-central-agent" Dec 05 17:53:39 crc kubenswrapper[4961]: E1205 17:53:39.766827 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40d6bdd6-246b-487a-abf4-5e44db189027" containerName="ceilometer-notification-agent" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.766834 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="40d6bdd6-246b-487a-abf4-5e44db189027" containerName="ceilometer-notification-agent" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.767063 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="40d6bdd6-246b-487a-abf4-5e44db189027" containerName="sg-core" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.767080 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="40d6bdd6-246b-487a-abf4-5e44db189027" containerName="ceilometer-central-agent" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.767094 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="40d6bdd6-246b-487a-abf4-5e44db189027" containerName="ceilometer-notification-agent" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.767119 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="40d6bdd6-246b-487a-abf4-5e44db189027" containerName="proxy-httpd" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.769990 4961 util.go:30] "No sandbox for pod can be found. 
Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.772663 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data"
Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.772731 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc"
Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.772740 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts"
Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.777225 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.792094 4961 scope.go:117] "RemoveContainer" containerID="85615c8f9b33de72b8c1a8439d7660a9dce959f974ffa94a73e8d694f21a4d40"
Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.810351 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " pod="openstack/ceilometer-0"
Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.810429 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " pod="openstack/ceilometer-0"
Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.810743 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-config-data\") pod \"ceilometer-0\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " pod="openstack/ceilometer-0"
Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.810958 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-run-httpd\") pod \"ceilometer-0\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " pod="openstack/ceilometer-0"
Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.810995 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-625jm\" (UniqueName: \"kubernetes.io/projected/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-kube-api-access-625jm\") pod \"ceilometer-0\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " pod="openstack/ceilometer-0"
Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.811157 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-log-httpd\") pod \"ceilometer-0\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " pod="openstack/ceilometer-0"
Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.811369 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-scripts\") pod \"ceilometer-0\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " pod="openstack/ceilometer-0"
Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.811528 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " pod="openstack/ceilometer-0"
Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.826084 4961 scope.go:117] "RemoveContainer" containerID="f5713dc6ab50474397330b8581a605c5b32eb07f1e28a62c851e0f0d544c9837"
Dec 05 17:53:39 crc kubenswrapper[4961]: E1205 17:53:39.826850 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f5713dc6ab50474397330b8581a605c5b32eb07f1e28a62c851e0f0d544c9837\": container with ID starting with f5713dc6ab50474397330b8581a605c5b32eb07f1e28a62c851e0f0d544c9837 not found: ID does not exist" containerID="f5713dc6ab50474397330b8581a605c5b32eb07f1e28a62c851e0f0d544c9837"
Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.826884 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f5713dc6ab50474397330b8581a605c5b32eb07f1e28a62c851e0f0d544c9837"} err="failed to get container status \"f5713dc6ab50474397330b8581a605c5b32eb07f1e28a62c851e0f0d544c9837\": rpc error: code = NotFound desc = could not find container \"f5713dc6ab50474397330b8581a605c5b32eb07f1e28a62c851e0f0d544c9837\": container with ID starting with f5713dc6ab50474397330b8581a605c5b32eb07f1e28a62c851e0f0d544c9837 not found: ID does not exist"
Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.826904 4961 scope.go:117] "RemoveContainer" containerID="264cfac70a49319869b757b9c8eeb9b8e140c1411e568d2b0ca153334ae3e756"
Dec 05 17:53:39 crc kubenswrapper[4961]: E1205 17:53:39.827321 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"264cfac70a49319869b757b9c8eeb9b8e140c1411e568d2b0ca153334ae3e756\": container with ID starting with 264cfac70a49319869b757b9c8eeb9b8e140c1411e568d2b0ca153334ae3e756 not found: ID does not exist" containerID="264cfac70a49319869b757b9c8eeb9b8e140c1411e568d2b0ca153334ae3e756"
Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.827372 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"264cfac70a49319869b757b9c8eeb9b8e140c1411e568d2b0ca153334ae3e756"} err="failed to get container status \"264cfac70a49319869b757b9c8eeb9b8e140c1411e568d2b0ca153334ae3e756\": rpc error: code = NotFound desc = could not find container \"264cfac70a49319869b757b9c8eeb9b8e140c1411e568d2b0ca153334ae3e756\": container with ID starting with 264cfac70a49319869b757b9c8eeb9b8e140c1411e568d2b0ca153334ae3e756 not found: ID does not exist"
Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.827407 4961 scope.go:117] "RemoveContainer" containerID="cca0ce3f6fe1dda59157fb358f3ed4e0427c0244cd431a08908adb017129ddec"
Dec 05 17:53:39 crc kubenswrapper[4961]: E1205 17:53:39.827742 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cca0ce3f6fe1dda59157fb358f3ed4e0427c0244cd431a08908adb017129ddec\": container with ID starting with cca0ce3f6fe1dda59157fb358f3ed4e0427c0244cd431a08908adb017129ddec not found: ID does not exist" containerID="cca0ce3f6fe1dda59157fb358f3ed4e0427c0244cd431a08908adb017129ddec"
Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.827766 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cca0ce3f6fe1dda59157fb358f3ed4e0427c0244cd431a08908adb017129ddec"} err="failed to get container status \"cca0ce3f6fe1dda59157fb358f3ed4e0427c0244cd431a08908adb017129ddec\": rpc error: code = NotFound desc = could not find container \"cca0ce3f6fe1dda59157fb358f3ed4e0427c0244cd431a08908adb017129ddec\": container with ID starting with cca0ce3f6fe1dda59157fb358f3ed4e0427c0244cd431a08908adb017129ddec not found: ID does not exist"
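[Annotation] The RemoveContainer/ContainerStatus failures above are benign: the CRI runtime reports gRPC NotFound for containers that are already gone, and kubelet treats that as a successful, idempotent cleanup rather than retrying. A self-contained Go sketch of the same pattern, using a stubbed error in place of a real CRI call:

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// fakeRemove stands in for a CRI RemoveContainer call against a container
// that has already been deleted, mirroring the runtime's reply above.
func fakeRemove(id string) error {
	return status.Errorf(codes.NotFound, "could not find container %q", id)
}

func main() {
	err := fakeRemove("f5713dc6ab50...") // truncated ID, illustrative only
	if status.Code(err) == codes.NotFound {
		// Already gone means the desired state is reached; do not retry.
		fmt.Println("treating NotFound as success:", err)
		return
	}
	if err != nil {
		panic(err)
	}
}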
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cca0ce3f6fe1dda59157fb358f3ed4e0427c0244cd431a08908adb017129ddec"} err="failed to get container status \"cca0ce3f6fe1dda59157fb358f3ed4e0427c0244cd431a08908adb017129ddec\": rpc error: code = NotFound desc = could not find container \"cca0ce3f6fe1dda59157fb358f3ed4e0427c0244cd431a08908adb017129ddec\": container with ID starting with cca0ce3f6fe1dda59157fb358f3ed4e0427c0244cd431a08908adb017129ddec not found: ID does not exist" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.827799 4961 scope.go:117] "RemoveContainer" containerID="85615c8f9b33de72b8c1a8439d7660a9dce959f974ffa94a73e8d694f21a4d40" Dec 05 17:53:39 crc kubenswrapper[4961]: E1205 17:53:39.828050 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85615c8f9b33de72b8c1a8439d7660a9dce959f974ffa94a73e8d694f21a4d40\": container with ID starting with 85615c8f9b33de72b8c1a8439d7660a9dce959f974ffa94a73e8d694f21a4d40 not found: ID does not exist" containerID="85615c8f9b33de72b8c1a8439d7660a9dce959f974ffa94a73e8d694f21a4d40" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.828068 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85615c8f9b33de72b8c1a8439d7660a9dce959f974ffa94a73e8d694f21a4d40"} err="failed to get container status \"85615c8f9b33de72b8c1a8439d7660a9dce959f974ffa94a73e8d694f21a4d40\": rpc error: code = NotFound desc = could not find container \"85615c8f9b33de72b8c1a8439d7660a9dce959f974ffa94a73e8d694f21a4d40\": container with ID starting with 85615c8f9b33de72b8c1a8439d7660a9dce959f974ffa94a73e8d694f21a4d40 not found: ID does not exist" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.916395 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-run-httpd\") pod \"ceilometer-0\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " pod="openstack/ceilometer-0" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.916814 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-625jm\" (UniqueName: \"kubernetes.io/projected/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-kube-api-access-625jm\") pod \"ceilometer-0\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " pod="openstack/ceilometer-0" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.916880 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-log-httpd\") pod \"ceilometer-0\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " pod="openstack/ceilometer-0" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.916996 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-run-httpd\") pod \"ceilometer-0\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " pod="openstack/ceilometer-0" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.917074 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-scripts\") pod \"ceilometer-0\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " pod="openstack/ceilometer-0" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.917102 4961 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " pod="openstack/ceilometer-0" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.917294 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " pod="openstack/ceilometer-0" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.917387 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " pod="openstack/ceilometer-0" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.917438 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-config-data\") pod \"ceilometer-0\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " pod="openstack/ceilometer-0" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.918492 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-log-httpd\") pod \"ceilometer-0\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " pod="openstack/ceilometer-0" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.922298 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " pod="openstack/ceilometer-0" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.922657 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-scripts\") pod \"ceilometer-0\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " pod="openstack/ceilometer-0" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.922745 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " pod="openstack/ceilometer-0" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.923271 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " pod="openstack/ceilometer-0" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.924301 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-config-data\") pod \"ceilometer-0\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " pod="openstack/ceilometer-0" Dec 05 17:53:39 crc kubenswrapper[4961]: I1205 17:53:39.939020 4961 operation_generator.go:637] 
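[Annotation] Each volume above goes through the same two-step dance: the reconciler notices it in the pod's desired state ("MountVolume started") and the operation executor then mounts it ("MountVolume.SetUp succeeded"). A toy Go sketch of that desired-versus-actual reconciliation shape, deliberately far simpler than kubelet's real volume manager:

package main

import "fmt"

// reconcile mounts whatever is desired but not yet actual, mirroring the
// "MountVolume started" / "SetUp succeeded" pairs in the entries above.
func reconcile(desired []string, actual map[string]bool, mount func(string) error) {
	for _, vol := range desired {
		if actual[vol] {
			continue // already mounted, nothing to do
		}
		fmt.Printf("MountVolume started for %q\n", vol)
		if err := mount(vol); err != nil {
			fmt.Printf("mount %q failed: %v (will retry next pass)\n", vol, err)
			continue
		}
		actual[vol] = true
		fmt.Printf("MountVolume.SetUp succeeded for %q\n", vol)
	}
}

func main() {
	desired := []string{"run-httpd", "log-httpd", "scripts", "config-data"}
	actual := map[string]bool{"run-httpd": true}
	reconcile(desired, actual, func(string) error { return nil })
}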
"MountVolume.SetUp succeeded for volume \"kube-api-access-625jm\" (UniqueName: \"kubernetes.io/projected/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-kube-api-access-625jm\") pod \"ceilometer-0\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " pod="openstack/ceilometer-0" Dec 05 17:53:40 crc kubenswrapper[4961]: I1205 17:53:40.101888 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:53:40 crc kubenswrapper[4961]: W1205 17:53:40.383784 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaa726d06_fb2f_4abf_bdb9_4e22bc78a400.slice/crio-243d8e91467ecc0023623b38d7c744d6d3420e4bec672bd7b6294d5616d765d1 WatchSource:0}: Error finding container 243d8e91467ecc0023623b38d7c744d6d3420e4bec672bd7b6294d5616d765d1: Status 404 returned error can't find the container with id 243d8e91467ecc0023623b38d7c744d6d3420e4bec672bd7b6294d5616d765d1 Dec 05 17:53:40 crc kubenswrapper[4961]: I1205 17:53:40.384862 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:53:40 crc kubenswrapper[4961]: I1205 17:53:40.704031 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa726d06-fb2f-4abf-bdb9-4e22bc78a400","Type":"ContainerStarted","Data":"243d8e91467ecc0023623b38d7c744d6d3420e4bec672bd7b6294d5616d765d1"} Dec 05 17:53:40 crc kubenswrapper[4961]: I1205 17:53:40.873397 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40d6bdd6-246b-487a-abf4-5e44db189027" path="/var/lib/kubelet/pods/40d6bdd6-246b-487a-abf4-5e44db189027/volumes" Dec 05 17:53:41 crc kubenswrapper[4961]: I1205 17:53:41.314498 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0" Dec 05 17:53:41 crc kubenswrapper[4961]: I1205 17:53:41.716345 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa726d06-fb2f-4abf-bdb9-4e22bc78a400","Type":"ContainerStarted","Data":"c600a3141ab303631bbfe94db4d5936183d957854461c530ee41a34950004283"} Dec 05 17:53:42 crc kubenswrapper[4961]: I1205 17:53:42.736418 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa726d06-fb2f-4abf-bdb9-4e22bc78a400","Type":"ContainerStarted","Data":"a566ebd7f84435e6a43ec333a0914b9125caf103cec2cef556aa82de14591413"} Dec 05 17:53:43 crc kubenswrapper[4961]: I1205 17:53:43.013668 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 17:53:43 crc kubenswrapper[4961]: I1205 17:53:43.014079 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0" Dec 05 17:53:43 crc kubenswrapper[4961]: I1205 17:53:43.747094 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa726d06-fb2f-4abf-bdb9-4e22bc78a400","Type":"ContainerStarted","Data":"fcd194b9277bcd204f581db45e6b17d1dae4fd22f5d1e963b2ee902dd0a4ecf4"} Dec 05 17:53:44 crc kubenswrapper[4961]: I1205 17:53:44.032863 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="594e1bb5-120e-4b26-909d-b8913c43e670" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)" Dec 05 17:53:44 crc kubenswrapper[4961]: I1205 17:53:44.033118 4961 prober.go:107] "Probe failed" 
probeType="Startup" pod="openstack/nova-metadata-0" podUID="594e1bb5-120e-4b26-909d-b8913c43e670" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 17:53:44 crc kubenswrapper[4961]: I1205 17:53:44.759959 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa726d06-fb2f-4abf-bdb9-4e22bc78a400","Type":"ContainerStarted","Data":"a00ccd2179bc8ab6a4fe1664e479bb1bf25f1c3aaccbc27178867afa4f271240"} Dec 05 17:53:44 crc kubenswrapper[4961]: I1205 17:53:44.761620 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 17:53:44 crc kubenswrapper[4961]: I1205 17:53:44.793272 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.165550396 podStartE2EDuration="5.793247257s" podCreationTimestamp="2025-12-05 17:53:39 +0000 UTC" firstStartedPulling="2025-12-05 17:53:40.386573437 +0000 UTC m=+1226.447723910" lastFinishedPulling="2025-12-05 17:53:44.014270298 +0000 UTC m=+1230.075420771" observedRunningTime="2025-12-05 17:53:44.790028846 +0000 UTC m=+1230.851179329" watchObservedRunningTime="2025-12-05 17:53:44.793247257 +0000 UTC m=+1230.854397730" Dec 05 17:53:46 crc kubenswrapper[4961]: I1205 17:53:46.225898 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/kube-state-metrics-0" Dec 05 17:53:46 crc kubenswrapper[4961]: I1205 17:53:46.314457 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0" Dec 05 17:53:46 crc kubenswrapper[4961]: I1205 17:53:46.349419 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0" Dec 05 17:53:46 crc kubenswrapper[4961]: I1205 17:53:46.643301 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 17:53:46 crc kubenswrapper[4961]: I1205 17:53:46.643358 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 17:53:46 crc kubenswrapper[4961]: I1205 17:53:46.808653 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0" Dec 05 17:53:47 crc kubenswrapper[4961]: I1205 17:53:47.726144 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c7a59662-9529-4704-89fd-1e74c46f7780" containerName="nova-api-api" probeResult="failure" output="Get \"http://10.217.0.195:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 17:53:47 crc kubenswrapper[4961]: I1205 17:53:47.726275 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="c7a59662-9529-4704-89fd-1e74c46f7780" containerName="nova-api-log" probeResult="failure" output="Get \"http://10.217.0.195:8774/\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Dec 05 17:53:53 crc kubenswrapper[4961]: I1205 17:53:53.018827 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 17:53:53 crc kubenswrapper[4961]: I1205 17:53:53.020161 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0" Dec 05 17:53:53 crc kubenswrapper[4961]: I1205 17:53:53.025012 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openstack/nova-metadata-0" Dec 05 17:53:53 crc kubenswrapper[4961]: E1205 17:53:53.503912 4961 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod474ec1a8_a032_453c_aa79_d05a4cc04c0f.slice/crio-15c78af6a9ea119e8ed679f8b97411ee09b5836246d44935e47bb11d68b09fdc.scope\": RecentStats: unable to find data in memory cache]" Dec 05 17:53:53 crc kubenswrapper[4961]: I1205 17:53:53.807198 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:53 crc kubenswrapper[4961]: I1205 17:53:53.856704 4961 generic.go:334] "Generic (PLEG): container finished" podID="474ec1a8-a032-453c-aa79-d05a4cc04c0f" containerID="15c78af6a9ea119e8ed679f8b97411ee09b5836246d44935e47bb11d68b09fdc" exitCode=137 Dec 05 17:53:53 crc kubenswrapper[4961]: I1205 17:53:53.856748 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:53 crc kubenswrapper[4961]: I1205 17:53:53.856795 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"474ec1a8-a032-453c-aa79-d05a4cc04c0f","Type":"ContainerDied","Data":"15c78af6a9ea119e8ed679f8b97411ee09b5836246d44935e47bb11d68b09fdc"} Dec 05 17:53:53 crc kubenswrapper[4961]: I1205 17:53:53.856834 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"474ec1a8-a032-453c-aa79-d05a4cc04c0f","Type":"ContainerDied","Data":"3acebaf711b5549d86e159c7963b0c501ea91ff152fe100a9d7ed03f058c0b90"} Dec 05 17:53:53 crc kubenswrapper[4961]: I1205 17:53:53.856850 4961 scope.go:117] "RemoveContainer" containerID="15c78af6a9ea119e8ed679f8b97411ee09b5836246d44935e47bb11d68b09fdc" Dec 05 17:53:53 crc kubenswrapper[4961]: I1205 17:53:53.862443 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0" Dec 05 17:53:53 crc kubenswrapper[4961]: I1205 17:53:53.879226 4961 scope.go:117] "RemoveContainer" containerID="15c78af6a9ea119e8ed679f8b97411ee09b5836246d44935e47bb11d68b09fdc" Dec 05 17:53:53 crc kubenswrapper[4961]: E1205 17:53:53.879738 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"15c78af6a9ea119e8ed679f8b97411ee09b5836246d44935e47bb11d68b09fdc\": container with ID starting with 15c78af6a9ea119e8ed679f8b97411ee09b5836246d44935e47bb11d68b09fdc not found: ID does not exist" containerID="15c78af6a9ea119e8ed679f8b97411ee09b5836246d44935e47bb11d68b09fdc" Dec 05 17:53:53 crc kubenswrapper[4961]: I1205 17:53:53.879790 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"15c78af6a9ea119e8ed679f8b97411ee09b5836246d44935e47bb11d68b09fdc"} err="failed to get container status \"15c78af6a9ea119e8ed679f8b97411ee09b5836246d44935e47bb11d68b09fdc\": rpc error: code = NotFound desc = could not find container \"15c78af6a9ea119e8ed679f8b97411ee09b5836246d44935e47bb11d68b09fdc\": container with ID starting with 15c78af6a9ea119e8ed679f8b97411ee09b5836246d44935e47bb11d68b09fdc not found: ID does not exist" Dec 05 17:53:53 crc kubenswrapper[4961]: I1205 17:53:53.904399 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-77hl2\" (UniqueName: 
\"kubernetes.io/projected/474ec1a8-a032-453c-aa79-d05a4cc04c0f-kube-api-access-77hl2\") pod \"474ec1a8-a032-453c-aa79-d05a4cc04c0f\" (UID: \"474ec1a8-a032-453c-aa79-d05a4cc04c0f\") " Dec 05 17:53:53 crc kubenswrapper[4961]: I1205 17:53:53.904492 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/474ec1a8-a032-453c-aa79-d05a4cc04c0f-config-data\") pod \"474ec1a8-a032-453c-aa79-d05a4cc04c0f\" (UID: \"474ec1a8-a032-453c-aa79-d05a4cc04c0f\") " Dec 05 17:53:53 crc kubenswrapper[4961]: I1205 17:53:53.904598 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/474ec1a8-a032-453c-aa79-d05a4cc04c0f-combined-ca-bundle\") pod \"474ec1a8-a032-453c-aa79-d05a4cc04c0f\" (UID: \"474ec1a8-a032-453c-aa79-d05a4cc04c0f\") " Dec 05 17:53:53 crc kubenswrapper[4961]: I1205 17:53:53.910237 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/474ec1a8-a032-453c-aa79-d05a4cc04c0f-kube-api-access-77hl2" (OuterVolumeSpecName: "kube-api-access-77hl2") pod "474ec1a8-a032-453c-aa79-d05a4cc04c0f" (UID: "474ec1a8-a032-453c-aa79-d05a4cc04c0f"). InnerVolumeSpecName "kube-api-access-77hl2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:53:53 crc kubenswrapper[4961]: I1205 17:53:53.940572 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/474ec1a8-a032-453c-aa79-d05a4cc04c0f-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "474ec1a8-a032-453c-aa79-d05a4cc04c0f" (UID: "474ec1a8-a032-453c-aa79-d05a4cc04c0f"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:53:53 crc kubenswrapper[4961]: I1205 17:53:53.957078 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/474ec1a8-a032-453c-aa79-d05a4cc04c0f-config-data" (OuterVolumeSpecName: "config-data") pod "474ec1a8-a032-453c-aa79-d05a4cc04c0f" (UID: "474ec1a8-a032-453c-aa79-d05a4cc04c0f"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.007093 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-77hl2\" (UniqueName: \"kubernetes.io/projected/474ec1a8-a032-453c-aa79-d05a4cc04c0f-kube-api-access-77hl2\") on node \"crc\" DevicePath \"\"" Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.007124 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/474ec1a8-a032-453c-aa79-d05a4cc04c0f-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.007136 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/474ec1a8-a032-453c-aa79-d05a4cc04c0f-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.189682 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.205172 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.219878 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 17:53:54 crc kubenswrapper[4961]: E1205 17:53:54.220382 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="474ec1a8-a032-453c-aa79-d05a4cc04c0f" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.220405 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="474ec1a8-a032-453c-aa79-d05a4cc04c0f" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.220684 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="474ec1a8-a032-453c-aa79-d05a4cc04c0f" containerName="nova-cell1-novncproxy-novncproxy" Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.221451 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.224867 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-public-svc" Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.224940 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-novncproxy-cell1-vencrypt" Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.225105 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-novncproxy-config-data" Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.232056 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"] Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.311236 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/2de9d115-6198-4316-a304-1e4eca7cdd98-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2de9d115-6198-4316-a304-1e4eca7cdd98\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.311426 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2de9d115-6198-4316-a304-1e4eca7cdd98-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"2de9d115-6198-4316-a304-1e4eca7cdd98\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.311457 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/2de9d115-6198-4316-a304-1e4eca7cdd98-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2de9d115-6198-4316-a304-1e4eca7cdd98\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.311486 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2de9d115-6198-4316-a304-1e4eca7cdd98-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"2de9d115-6198-4316-a304-1e4eca7cdd98\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.311534 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wf7rz\" (UniqueName: \"kubernetes.io/projected/2de9d115-6198-4316-a304-1e4eca7cdd98-kube-api-access-wf7rz\") pod \"nova-cell1-novncproxy-0\" (UID: \"2de9d115-6198-4316-a304-1e4eca7cdd98\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.414395 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/2de9d115-6198-4316-a304-1e4eca7cdd98-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2de9d115-6198-4316-a304-1e4eca7cdd98\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.414584 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2de9d115-6198-4316-a304-1e4eca7cdd98-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"2de9d115-6198-4316-a304-1e4eca7cdd98\") " 
pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.414614 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/2de9d115-6198-4316-a304-1e4eca7cdd98-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2de9d115-6198-4316-a304-1e4eca7cdd98\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.414644 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2de9d115-6198-4316-a304-1e4eca7cdd98-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"2de9d115-6198-4316-a304-1e4eca7cdd98\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.414670 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wf7rz\" (UniqueName: \"kubernetes.io/projected/2de9d115-6198-4316-a304-1e4eca7cdd98-kube-api-access-wf7rz\") pod \"nova-cell1-novncproxy-0\" (UID: \"2de9d115-6198-4316-a304-1e4eca7cdd98\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.418689 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2de9d115-6198-4316-a304-1e4eca7cdd98-combined-ca-bundle\") pod \"nova-cell1-novncproxy-0\" (UID: \"2de9d115-6198-4316-a304-1e4eca7cdd98\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.418897 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-novncproxy-tls-certs\" (UniqueName: \"kubernetes.io/secret/2de9d115-6198-4316-a304-1e4eca7cdd98-nova-novncproxy-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2de9d115-6198-4316-a304-1e4eca7cdd98\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.420120 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"vencrypt-tls-certs\" (UniqueName: \"kubernetes.io/secret/2de9d115-6198-4316-a304-1e4eca7cdd98-vencrypt-tls-certs\") pod \"nova-cell1-novncproxy-0\" (UID: \"2de9d115-6198-4316-a304-1e4eca7cdd98\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.421485 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2de9d115-6198-4316-a304-1e4eca7cdd98-config-data\") pod \"nova-cell1-novncproxy-0\" (UID: \"2de9d115-6198-4316-a304-1e4eca7cdd98\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.433159 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wf7rz\" (UniqueName: \"kubernetes.io/projected/2de9d115-6198-4316-a304-1e4eca7cdd98-kube-api-access-wf7rz\") pod \"nova-cell1-novncproxy-0\" (UID: \"2de9d115-6198-4316-a304-1e4eca7cdd98\") " pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.544009 4961 util.go:30] "No sandbox for pod can be found. 
Dec 05 17:53:54 crc kubenswrapper[4961]: I1205 17:53:54.932161 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="474ec1a8-a032-453c-aa79-d05a4cc04c0f" path="/var/lib/kubelet/pods/474ec1a8-a032-453c-aa79-d05a4cc04c0f/volumes"
Dec 05 17:53:55 crc kubenswrapper[4961]: I1205 17:53:55.054150 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-novncproxy-0"]
Dec 05 17:53:55 crc kubenswrapper[4961]: W1205 17:53:55.059109 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2de9d115_6198_4316_a304_1e4eca7cdd98.slice/crio-82b2ea791eb0319c975eedae0842165b778d819a1b315471933f0c78534a6abf WatchSource:0}: Error finding container 82b2ea791eb0319c975eedae0842165b778d819a1b315471933f0c78534a6abf: Status 404 returned error can't find the container with id 82b2ea791eb0319c975eedae0842165b778d819a1b315471933f0c78534a6abf
Dec 05 17:53:55 crc kubenswrapper[4961]: I1205 17:53:55.943229 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"2de9d115-6198-4316-a304-1e4eca7cdd98","Type":"ContainerStarted","Data":"54315e0432a752b908b7cb74ac165a998dc1514cfb4e7de35e292c8177e333d2"}
Dec 05 17:53:55 crc kubenswrapper[4961]: I1205 17:53:55.943534 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-novncproxy-0" event={"ID":"2de9d115-6198-4316-a304-1e4eca7cdd98","Type":"ContainerStarted","Data":"82b2ea791eb0319c975eedae0842165b778d819a1b315471933f0c78534a6abf"}
Dec 05 17:53:55 crc kubenswrapper[4961]: I1205 17:53:55.977200 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-cell1-novncproxy-0" podStartSLOduration=1.9771836280000001 podStartE2EDuration="1.977183628s" podCreationTimestamp="2025-12-05 17:53:54 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:53:55.965399826 +0000 UTC m=+1242.026550329" watchObservedRunningTime="2025-12-05 17:53:55.977183628 +0000 UTC m=+1242.038334101"
Dec 05 17:53:56 crc kubenswrapper[4961]: I1205 17:53:56.646971 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Dec 05 17:53:56 crc kubenswrapper[4961]: I1205 17:53:56.648246 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Dec 05 17:53:56 crc kubenswrapper[4961]: I1205 17:53:56.650502 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Dec 05 17:53:56 crc kubenswrapper[4961]: I1205 17:53:56.653810 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Dec 05 17:53:56 crc kubenswrapper[4961]: I1205 17:53:56.954750 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Dec 05 17:53:56 crc kubenswrapper[4961]: I1205 17:53:56.959866 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Dec 05 17:53:57 crc kubenswrapper[4961]: I1205 17:53:57.155077 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"]
Dec 05 17:53:57 crc kubenswrapper[4961]: I1205 17:53:57.157511 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"
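[Annotation] The podStartSLOduration entry above carries the raw timestamps behind the 1.977183628s figure: the zeroed pull times mean the image was already present, and the E2E duration equals watchObservedRunningTime minus podCreationTimestamp (the ceilometer-0 entry earlier shows the same relationship). A sketch that reparses the log's own time format and recomputes it; treating watch-observed time as the endpoint is an inference from the numbers, not something the log states:

package main

import (
	"fmt"
	"strings"
	"time"
)

// parse handles the log's Go time format, dropping the monotonic clock
// suffix ("m=+...") that time.String() appends.
func parse(s string) time.Time {
	s = strings.Split(s, " m=")[0]
	t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := parse("2025-12-05 17:53:54 +0000 UTC")
	running := parse("2025-12-05 17:53:55.977183628 +0000 UTC m=+1242.038334101")
	// Prints 1.977183628s, matching podStartE2EDuration in the entry above.
	fmt.Println("E2E startup:", running.Sub(created))
}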
Dec 05 17:53:57 crc kubenswrapper[4961]: I1205 17:53:57.171946 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-config\") pod \"dnsmasq-dns-5c7b6c5df9-kzlzv\" (UID: \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"
Dec 05 17:53:57 crc kubenswrapper[4961]: I1205 17:53:57.172001 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-kzlzv\" (UID: \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"
Dec 05 17:53:57 crc kubenswrapper[4961]: I1205 17:53:57.172062 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-kzlzv\" (UID: \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"
Dec 05 17:53:57 crc kubenswrapper[4961]: I1205 17:53:57.172091 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-kzlzv\" (UID: \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"
Dec 05 17:53:57 crc kubenswrapper[4961]: I1205 17:53:57.172159 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cd29z\" (UniqueName: \"kubernetes.io/projected/caec0c5b-2cc3-4fdb-9f16-8653966fca15-kube-api-access-cd29z\") pod \"dnsmasq-dns-5c7b6c5df9-kzlzv\" (UID: \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"
Dec 05 17:53:57 crc kubenswrapper[4961]: I1205 17:53:57.172186 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-kzlzv\" (UID: \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"
Dec 05 17:53:57 crc kubenswrapper[4961]: I1205 17:53:57.176053 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"]
Dec 05 17:53:57 crc kubenswrapper[4961]: I1205 17:53:57.274000 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cd29z\" (UniqueName: \"kubernetes.io/projected/caec0c5b-2cc3-4fdb-9f16-8653966fca15-kube-api-access-cd29z\") pod \"dnsmasq-dns-5c7b6c5df9-kzlzv\" (UID: \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"
Dec 05 17:53:57 crc kubenswrapper[4961]: I1205 17:53:57.274044 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-kzlzv\" (UID: \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"
Dec 05 17:53:57 crc kubenswrapper[4961]: I1205 17:53:57.274084 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-config\") pod \"dnsmasq-dns-5c7b6c5df9-kzlzv\" (UID: \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"
Dec 05 17:53:57 crc kubenswrapper[4961]: I1205 17:53:57.274108 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-kzlzv\" (UID: \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"
Dec 05 17:53:57 crc kubenswrapper[4961]: I1205 17:53:57.274160 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-kzlzv\" (UID: \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"
Dec 05 17:53:57 crc kubenswrapper[4961]: I1205 17:53:57.274185 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-kzlzv\" (UID: \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"
Dec 05 17:53:57 crc kubenswrapper[4961]: I1205 17:53:57.275384 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-dns-swift-storage-0\") pod \"dnsmasq-dns-5c7b6c5df9-kzlzv\" (UID: \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"
Dec 05 17:53:57 crc kubenswrapper[4961]: I1205 17:53:57.275412 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-config\") pod \"dnsmasq-dns-5c7b6c5df9-kzlzv\" (UID: \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"
Dec 05 17:53:57 crc kubenswrapper[4961]: I1205 17:53:57.275431 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-dns-svc\") pod \"dnsmasq-dns-5c7b6c5df9-kzlzv\" (UID: \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"
Dec 05 17:53:57 crc kubenswrapper[4961]: I1205 17:53:57.276042 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-ovsdbserver-sb\") pod \"dnsmasq-dns-5c7b6c5df9-kzlzv\" (UID: \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"
Dec 05 17:53:57 crc kubenswrapper[4961]: I1205 17:53:57.276156 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-ovsdbserver-nb\") pod \"dnsmasq-dns-5c7b6c5df9-kzlzv\" (UID: \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"
Dec 05 17:53:57 crc kubenswrapper[4961]: I1205 17:53:57.297266 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cd29z\" (UniqueName: \"kubernetes.io/projected/caec0c5b-2cc3-4fdb-9f16-8653966fca15-kube-api-access-cd29z\") pod \"dnsmasq-dns-5c7b6c5df9-kzlzv\" (UID: \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\") " pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"
Dec 05 17:53:57 crc kubenswrapper[4961]: I1205 17:53:57.543919 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"
Dec 05 17:53:58 crc kubenswrapper[4961]: I1205 17:53:58.141236 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"]
Dec 05 17:53:58 crc kubenswrapper[4961]: I1205 17:53:58.974959 4961 generic.go:334] "Generic (PLEG): container finished" podID="caec0c5b-2cc3-4fdb-9f16-8653966fca15" containerID="dfbd4274978d1a405efcc01b365fb7eebc3baf7c12a5ed018d931ce590fe5059" exitCode=0
Dec 05 17:53:58 crc kubenswrapper[4961]: I1205 17:53:58.975060 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv" event={"ID":"caec0c5b-2cc3-4fdb-9f16-8653966fca15","Type":"ContainerDied","Data":"dfbd4274978d1a405efcc01b365fb7eebc3baf7c12a5ed018d931ce590fe5059"}
Dec 05 17:53:58 crc kubenswrapper[4961]: I1205 17:53:58.975252 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv" event={"ID":"caec0c5b-2cc3-4fdb-9f16-8653966fca15","Type":"ContainerStarted","Data":"84a352c084cead6fa7fbb576e76ecbc954243be477a6c7ddc838743b12050a86"}
Dec 05 17:53:59 crc kubenswrapper[4961]: I1205 17:53:59.450039 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"]
Dec 05 17:53:59 crc kubenswrapper[4961]: I1205 17:53:59.450676 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="aa726d06-fb2f-4abf-bdb9-4e22bc78a400" containerName="ceilometer-central-agent" containerID="cri-o://c600a3141ab303631bbfe94db4d5936183d957854461c530ee41a34950004283" gracePeriod=30
Dec 05 17:53:59 crc kubenswrapper[4961]: I1205 17:53:59.450759 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="aa726d06-fb2f-4abf-bdb9-4e22bc78a400" containerName="sg-core" containerID="cri-o://fcd194b9277bcd204f581db45e6b17d1dae4fd22f5d1e963b2ee902dd0a4ecf4" gracePeriod=30
Dec 05 17:53:59 crc kubenswrapper[4961]: I1205 17:53:59.450833 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="aa726d06-fb2f-4abf-bdb9-4e22bc78a400" containerName="ceilometer-notification-agent" containerID="cri-o://a566ebd7f84435e6a43ec333a0914b9125caf103cec2cef556aa82de14591413" gracePeriod=30
Dec 05 17:53:59 crc kubenswrapper[4961]: I1205 17:53:59.450743 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/ceilometer-0" podUID="aa726d06-fb2f-4abf-bdb9-4e22bc78a400" containerName="proxy-httpd" containerID="cri-o://a00ccd2179bc8ab6a4fe1664e479bb1bf25f1c3aaccbc27178867afa4f271240" gracePeriod=30
Dec 05 17:53:59 crc kubenswrapper[4961]: I1205 17:53:59.461032 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/ceilometer-0" podUID="aa726d06-fb2f-4abf-bdb9-4e22bc78a400" containerName="proxy-httpd" probeResult="failure" output="Get \"https://10.217.0.196:3000/\": EOF"
Dec 05 17:53:59 crc kubenswrapper[4961]: I1205 17:53:59.545167 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-cell1-novncproxy-0"
Dec 05 17:53:59 crc kubenswrapper[4961]: I1205 17:53:59.989378 4961 generic.go:334] "Generic (PLEG): container finished" podID="aa726d06-fb2f-4abf-bdb9-4e22bc78a400" containerID="a00ccd2179bc8ab6a4fe1664e479bb1bf25f1c3aaccbc27178867afa4f271240" exitCode=0
Dec 05 17:53:59 crc kubenswrapper[4961]: I1205 17:53:59.989408 4961 generic.go:334] "Generic (PLEG): container finished" podID="aa726d06-fb2f-4abf-bdb9-4e22bc78a400" containerID="fcd194b9277bcd204f581db45e6b17d1dae4fd22f5d1e963b2ee902dd0a4ecf4" exitCode=2
Dec 05 17:53:59 crc kubenswrapper[4961]: I1205 17:53:59.989415 4961 generic.go:334] "Generic (PLEG): container finished" podID="aa726d06-fb2f-4abf-bdb9-4e22bc78a400" containerID="c600a3141ab303631bbfe94db4d5936183d957854461c530ee41a34950004283" exitCode=0
Dec 05 17:53:59 crc kubenswrapper[4961]: I1205 17:53:59.989452 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa726d06-fb2f-4abf-bdb9-4e22bc78a400","Type":"ContainerDied","Data":"a00ccd2179bc8ab6a4fe1664e479bb1bf25f1c3aaccbc27178867afa4f271240"}
Dec 05 17:53:59 crc kubenswrapper[4961]: I1205 17:53:59.989479 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa726d06-fb2f-4abf-bdb9-4e22bc78a400","Type":"ContainerDied","Data":"fcd194b9277bcd204f581db45e6b17d1dae4fd22f5d1e963b2ee902dd0a4ecf4"}
Dec 05 17:53:59 crc kubenswrapper[4961]: I1205 17:53:59.989491 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa726d06-fb2f-4abf-bdb9-4e22bc78a400","Type":"ContainerDied","Data":"c600a3141ab303631bbfe94db4d5936183d957854461c530ee41a34950004283"}
Dec 05 17:53:59 crc kubenswrapper[4961]: I1205 17:53:59.991512 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv" event={"ID":"caec0c5b-2cc3-4fdb-9f16-8653966fca15","Type":"ContainerStarted","Data":"c4fd39dd1bac45bae4d0dbeeeb2a1c9f57446abf2c230f71285d900a2ca0d6a8"}
Dec 05 17:53:59 crc kubenswrapper[4961]: I1205 17:53:59.992812 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"
Dec 05 17:54:00 crc kubenswrapper[4961]: I1205 17:54:00.018651 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv" podStartSLOduration=3.018633692 podStartE2EDuration="3.018633692s" podCreationTimestamp="2025-12-05 17:53:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:54:00.015428083 +0000 UTC m=+1246.076578586" watchObservedRunningTime="2025-12-05 17:54:00.018633692 +0000 UTC m=+1246.079784155"
Dec 05 17:54:00 crc kubenswrapper[4961]: I1205 17:54:00.424272 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 05 17:54:00 crc kubenswrapper[4961]: I1205 17:54:00.424688 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c7a59662-9529-4704-89fd-1e74c46f7780" containerName="nova-api-log" containerID="cri-o://76c9038be3ea78ba1036aad76ce4088712198d4eefee685f3b33a0e9056d09bc" gracePeriod=30
Dec 05 17:54:00 crc kubenswrapper[4961]: I1205 17:54:00.425180 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="c7a59662-9529-4704-89fd-1e74c46f7780" containerName="nova-api-api" containerID="cri-o://d92c3d8ebc74d843efb71475e1f4313cbaac899381c6ad2eefc1750e828c839d" gracePeriod=30
Dec 05 17:54:01 crc kubenswrapper[4961]: I1205 17:54:01.002520 4961 generic.go:334] "Generic (PLEG): container finished" podID="c7a59662-9529-4704-89fd-1e74c46f7780" containerID="76c9038be3ea78ba1036aad76ce4088712198d4eefee685f3b33a0e9056d09bc" exitCode=143
"Generic (PLEG): container finished" podID="c7a59662-9529-4704-89fd-1e74c46f7780" containerID="76c9038be3ea78ba1036aad76ce4088712198d4eefee685f3b33a0e9056d09bc" exitCode=143 Dec 05 17:54:01 crc kubenswrapper[4961]: I1205 17:54:01.002621 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c7a59662-9529-4704-89fd-1e74c46f7780","Type":"ContainerDied","Data":"76c9038be3ea78ba1036aad76ce4088712198d4eefee685f3b33a0e9056d09bc"} Dec 05 17:54:03 crc kubenswrapper[4961]: I1205 17:54:03.751366 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:54:03 crc kubenswrapper[4961]: E1205 17:54:03.810309 4961 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc7a59662_9529_4704_89fd_1e74c46f7780.slice/crio-conmon-d92c3d8ebc74d843efb71475e1f4313cbaac899381c6ad2eefc1750e828c839d.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc7a59662_9529_4704_89fd_1e74c46f7780.slice/crio-d92c3d8ebc74d843efb71475e1f4313cbaac899381c6ad2eefc1750e828c839d.scope\": RecentStats: unable to find data in memory cache]" Dec 05 17:54:03 crc kubenswrapper[4961]: I1205 17:54:03.847372 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-sg-core-conf-yaml\") pod \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " Dec 05 17:54:03 crc kubenswrapper[4961]: I1205 17:54:03.847710 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-config-data\") pod \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " Dec 05 17:54:03 crc kubenswrapper[4961]: I1205 17:54:03.847761 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-625jm\" (UniqueName: \"kubernetes.io/projected/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-kube-api-access-625jm\") pod \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " Dec 05 17:54:03 crc kubenswrapper[4961]: I1205 17:54:03.847796 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-run-httpd\") pod \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " Dec 05 17:54:03 crc kubenswrapper[4961]: I1205 17:54:03.847856 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-ceilometer-tls-certs\") pod \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " Dec 05 17:54:03 crc kubenswrapper[4961]: I1205 17:54:03.847902 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-scripts\") pod \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " Dec 05 17:54:03 crc kubenswrapper[4961]: I1205 17:54:03.847955 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started 
for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-combined-ca-bundle\") pod \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " Dec 05 17:54:03 crc kubenswrapper[4961]: I1205 17:54:03.847976 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-log-httpd\") pod \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\" (UID: \"aa726d06-fb2f-4abf-bdb9-4e22bc78a400\") " Dec 05 17:54:03 crc kubenswrapper[4961]: I1205 17:54:03.848816 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-run-httpd" (OuterVolumeSpecName: "run-httpd") pod "aa726d06-fb2f-4abf-bdb9-4e22bc78a400" (UID: "aa726d06-fb2f-4abf-bdb9-4e22bc78a400"). InnerVolumeSpecName "run-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:54:03 crc kubenswrapper[4961]: I1205 17:54:03.848868 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-log-httpd" (OuterVolumeSpecName: "log-httpd") pod "aa726d06-fb2f-4abf-bdb9-4e22bc78a400" (UID: "aa726d06-fb2f-4abf-bdb9-4e22bc78a400"). InnerVolumeSpecName "log-httpd". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:54:03 crc kubenswrapper[4961]: I1205 17:54:03.853873 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-scripts" (OuterVolumeSpecName: "scripts") pod "aa726d06-fb2f-4abf-bdb9-4e22bc78a400" (UID: "aa726d06-fb2f-4abf-bdb9-4e22bc78a400"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:54:03 crc kubenswrapper[4961]: I1205 17:54:03.860552 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-kube-api-access-625jm" (OuterVolumeSpecName: "kube-api-access-625jm") pod "aa726d06-fb2f-4abf-bdb9-4e22bc78a400" (UID: "aa726d06-fb2f-4abf-bdb9-4e22bc78a400"). InnerVolumeSpecName "kube-api-access-625jm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:54:03 crc kubenswrapper[4961]: I1205 17:54:03.905948 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-ceilometer-tls-certs" (OuterVolumeSpecName: "ceilometer-tls-certs") pod "aa726d06-fb2f-4abf-bdb9-4e22bc78a400" (UID: "aa726d06-fb2f-4abf-bdb9-4e22bc78a400"). InnerVolumeSpecName "ceilometer-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:54:03 crc kubenswrapper[4961]: I1205 17:54:03.907578 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-sg-core-conf-yaml" (OuterVolumeSpecName: "sg-core-conf-yaml") pod "aa726d06-fb2f-4abf-bdb9-4e22bc78a400" (UID: "aa726d06-fb2f-4abf-bdb9-4e22bc78a400"). InnerVolumeSpecName "sg-core-conf-yaml". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:54:03 crc kubenswrapper[4961]: I1205 17:54:03.930723 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "aa726d06-fb2f-4abf-bdb9-4e22bc78a400" (UID: "aa726d06-fb2f-4abf-bdb9-4e22bc78a400"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:54:03 crc kubenswrapper[4961]: I1205 17:54:03.950339 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:03 crc kubenswrapper[4961]: I1205 17:54:03.950370 4961 reconciler_common.go:293] "Volume detached for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-log-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:03 crc kubenswrapper[4961]: I1205 17:54:03.950380 4961 reconciler_common.go:293] "Volume detached for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-sg-core-conf-yaml\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:03 crc kubenswrapper[4961]: I1205 17:54:03.950390 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-625jm\" (UniqueName: \"kubernetes.io/projected/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-kube-api-access-625jm\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:03 crc kubenswrapper[4961]: I1205 17:54:03.950402 4961 reconciler_common.go:293] "Volume detached for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-run-httpd\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:03 crc kubenswrapper[4961]: I1205 17:54:03.950409 4961 reconciler_common.go:293] "Volume detached for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-ceilometer-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:03 crc kubenswrapper[4961]: I1205 17:54:03.950417 4961 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:03 crc kubenswrapper[4961]: I1205 17:54:03.972329 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 17:54:03 crc kubenswrapper[4961]: I1205 17:54:03.976358 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-config-data" (OuterVolumeSpecName: "config-data") pod "aa726d06-fb2f-4abf-bdb9-4e22bc78a400" (UID: "aa726d06-fb2f-4abf-bdb9-4e22bc78a400"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.031284 4961 generic.go:334] "Generic (PLEG): container finished" podID="c7a59662-9529-4704-89fd-1e74c46f7780" containerID="d92c3d8ebc74d843efb71475e1f4313cbaac899381c6ad2eefc1750e828c839d" exitCode=0 Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.031343 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.031370 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c7a59662-9529-4704-89fd-1e74c46f7780","Type":"ContainerDied","Data":"d92c3d8ebc74d843efb71475e1f4313cbaac899381c6ad2eefc1750e828c839d"} Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.031420 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"c7a59662-9529-4704-89fd-1e74c46f7780","Type":"ContainerDied","Data":"e2f9c7a5ddc84bb266e06b22180ed2f6a700de2854728fe4ce96afc03d158a40"} Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.031442 4961 scope.go:117] "RemoveContainer" containerID="d92c3d8ebc74d843efb71475e1f4313cbaac899381c6ad2eefc1750e828c839d" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.038873 4961 generic.go:334] "Generic (PLEG): container finished" podID="aa726d06-fb2f-4abf-bdb9-4e22bc78a400" containerID="a566ebd7f84435e6a43ec333a0914b9125caf103cec2cef556aa82de14591413" exitCode=0 Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.038914 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa726d06-fb2f-4abf-bdb9-4e22bc78a400","Type":"ContainerDied","Data":"a566ebd7f84435e6a43ec333a0914b9125caf103cec2cef556aa82de14591413"} Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.038939 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"aa726d06-fb2f-4abf-bdb9-4e22bc78a400","Type":"ContainerDied","Data":"243d8e91467ecc0023623b38d7c744d6d3420e4bec672bd7b6294d5616d765d1"} Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.039009 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.051032 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7a59662-9529-4704-89fd-1e74c46f7780-logs\") pod \"c7a59662-9529-4704-89fd-1e74c46f7780\" (UID: \"c7a59662-9529-4704-89fd-1e74c46f7780\") " Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.051116 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7a59662-9529-4704-89fd-1e74c46f7780-config-data\") pod \"c7a59662-9529-4704-89fd-1e74c46f7780\" (UID: \"c7a59662-9529-4704-89fd-1e74c46f7780\") " Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.051187 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wtnbp\" (UniqueName: \"kubernetes.io/projected/c7a59662-9529-4704-89fd-1e74c46f7780-kube-api-access-wtnbp\") pod \"c7a59662-9529-4704-89fd-1e74c46f7780\" (UID: \"c7a59662-9529-4704-89fd-1e74c46f7780\") " Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.051280 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7a59662-9529-4704-89fd-1e74c46f7780-combined-ca-bundle\") pod \"c7a59662-9529-4704-89fd-1e74c46f7780\" (UID: \"c7a59662-9529-4704-89fd-1e74c46f7780\") " Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.051757 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/aa726d06-fb2f-4abf-bdb9-4e22bc78a400-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.052514 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7a59662-9529-4704-89fd-1e74c46f7780-logs" (OuterVolumeSpecName: "logs") pod "c7a59662-9529-4704-89fd-1e74c46f7780" (UID: "c7a59662-9529-4704-89fd-1e74c46f7780"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.057596 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7a59662-9529-4704-89fd-1e74c46f7780-kube-api-access-wtnbp" (OuterVolumeSpecName: "kube-api-access-wtnbp") pod "c7a59662-9529-4704-89fd-1e74c46f7780" (UID: "c7a59662-9529-4704-89fd-1e74c46f7780"). InnerVolumeSpecName "kube-api-access-wtnbp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.067040 4961 scope.go:117] "RemoveContainer" containerID="76c9038be3ea78ba1036aad76ce4088712198d4eefee685f3b33a0e9056d09bc" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.092484 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7a59662-9529-4704-89fd-1e74c46f7780-config-data" (OuterVolumeSpecName: "config-data") pod "c7a59662-9529-4704-89fd-1e74c46f7780" (UID: "c7a59662-9529-4704-89fd-1e74c46f7780"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.094148 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7a59662-9529-4704-89fd-1e74c46f7780-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c7a59662-9529-4704-89fd-1e74c46f7780" (UID: "c7a59662-9529-4704-89fd-1e74c46f7780"). 
InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.096266 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.111921 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.120947 4961 scope.go:117] "RemoveContainer" containerID="d92c3d8ebc74d843efb71475e1f4313cbaac899381c6ad2eefc1750e828c839d" Dec 05 17:54:04 crc kubenswrapper[4961]: E1205 17:54:04.121795 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d92c3d8ebc74d843efb71475e1f4313cbaac899381c6ad2eefc1750e828c839d\": container with ID starting with d92c3d8ebc74d843efb71475e1f4313cbaac899381c6ad2eefc1750e828c839d not found: ID does not exist" containerID="d92c3d8ebc74d843efb71475e1f4313cbaac899381c6ad2eefc1750e828c839d" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.121851 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d92c3d8ebc74d843efb71475e1f4313cbaac899381c6ad2eefc1750e828c839d"} err="failed to get container status \"d92c3d8ebc74d843efb71475e1f4313cbaac899381c6ad2eefc1750e828c839d\": rpc error: code = NotFound desc = could not find container \"d92c3d8ebc74d843efb71475e1f4313cbaac899381c6ad2eefc1750e828c839d\": container with ID starting with d92c3d8ebc74d843efb71475e1f4313cbaac899381c6ad2eefc1750e828c839d not found: ID does not exist" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.121885 4961 scope.go:117] "RemoveContainer" containerID="76c9038be3ea78ba1036aad76ce4088712198d4eefee685f3b33a0e9056d09bc" Dec 05 17:54:04 crc kubenswrapper[4961]: E1205 17:54:04.122415 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"76c9038be3ea78ba1036aad76ce4088712198d4eefee685f3b33a0e9056d09bc\": container with ID starting with 76c9038be3ea78ba1036aad76ce4088712198d4eefee685f3b33a0e9056d09bc not found: ID does not exist" containerID="76c9038be3ea78ba1036aad76ce4088712198d4eefee685f3b33a0e9056d09bc" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.122446 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"76c9038be3ea78ba1036aad76ce4088712198d4eefee685f3b33a0e9056d09bc"} err="failed to get container status \"76c9038be3ea78ba1036aad76ce4088712198d4eefee685f3b33a0e9056d09bc\": rpc error: code = NotFound desc = could not find container \"76c9038be3ea78ba1036aad76ce4088712198d4eefee685f3b33a0e9056d09bc\": container with ID starting with 76c9038be3ea78ba1036aad76ce4088712198d4eefee685f3b33a0e9056d09bc not found: ID does not exist" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.122465 4961 scope.go:117] "RemoveContainer" containerID="a00ccd2179bc8ab6a4fe1664e479bb1bf25f1c3aaccbc27178867afa4f271240" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.123133 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:54:04 crc kubenswrapper[4961]: E1205 17:54:04.123516 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa726d06-fb2f-4abf-bdb9-4e22bc78a400" containerName="proxy-httpd" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.123531 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa726d06-fb2f-4abf-bdb9-4e22bc78a400" 
containerName="proxy-httpd" Dec 05 17:54:04 crc kubenswrapper[4961]: E1205 17:54:04.123548 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa726d06-fb2f-4abf-bdb9-4e22bc78a400" containerName="ceilometer-notification-agent" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.123556 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa726d06-fb2f-4abf-bdb9-4e22bc78a400" containerName="ceilometer-notification-agent" Dec 05 17:54:04 crc kubenswrapper[4961]: E1205 17:54:04.123568 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7a59662-9529-4704-89fd-1e74c46f7780" containerName="nova-api-api" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.123573 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7a59662-9529-4704-89fd-1e74c46f7780" containerName="nova-api-api" Dec 05 17:54:04 crc kubenswrapper[4961]: E1205 17:54:04.123592 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7a59662-9529-4704-89fd-1e74c46f7780" containerName="nova-api-log" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.123598 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7a59662-9529-4704-89fd-1e74c46f7780" containerName="nova-api-log" Dec 05 17:54:04 crc kubenswrapper[4961]: E1205 17:54:04.123609 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa726d06-fb2f-4abf-bdb9-4e22bc78a400" containerName="sg-core" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.123615 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa726d06-fb2f-4abf-bdb9-4e22bc78a400" containerName="sg-core" Dec 05 17:54:04 crc kubenswrapper[4961]: E1205 17:54:04.123627 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa726d06-fb2f-4abf-bdb9-4e22bc78a400" containerName="ceilometer-central-agent" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.123633 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa726d06-fb2f-4abf-bdb9-4e22bc78a400" containerName="ceilometer-central-agent" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.123828 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa726d06-fb2f-4abf-bdb9-4e22bc78a400" containerName="ceilometer-central-agent" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.123852 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa726d06-fb2f-4abf-bdb9-4e22bc78a400" containerName="proxy-httpd" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.123866 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa726d06-fb2f-4abf-bdb9-4e22bc78a400" containerName="ceilometer-notification-agent" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.123880 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7a59662-9529-4704-89fd-1e74c46f7780" containerName="nova-api-api" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.123892 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7a59662-9529-4704-89fd-1e74c46f7780" containerName="nova-api-log" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.123899 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa726d06-fb2f-4abf-bdb9-4e22bc78a400" containerName="sg-core" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.126040 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.131865 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.132039 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-config-data" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.132195 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-scripts" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.134112 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-ceilometer-internal-svc" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.154320 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b4tcn\" (UniqueName: \"kubernetes.io/projected/a61be49f-b67b-4cd9-8790-12fe7dfde50b-kube-api-access-b4tcn\") pod \"ceilometer-0\" (UID: \"a61be49f-b67b-4cd9-8790-12fe7dfde50b\") " pod="openstack/ceilometer-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.154397 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a61be49f-b67b-4cd9-8790-12fe7dfde50b-scripts\") pod \"ceilometer-0\" (UID: \"a61be49f-b67b-4cd9-8790-12fe7dfde50b\") " pod="openstack/ceilometer-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.154427 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a61be49f-b67b-4cd9-8790-12fe7dfde50b-config-data\") pod \"ceilometer-0\" (UID: \"a61be49f-b67b-4cd9-8790-12fe7dfde50b\") " pod="openstack/ceilometer-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.154448 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a61be49f-b67b-4cd9-8790-12fe7dfde50b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a61be49f-b67b-4cd9-8790-12fe7dfde50b\") " pod="openstack/ceilometer-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.154467 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a61be49f-b67b-4cd9-8790-12fe7dfde50b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a61be49f-b67b-4cd9-8790-12fe7dfde50b\") " pod="openstack/ceilometer-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.154498 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a61be49f-b67b-4cd9-8790-12fe7dfde50b-run-httpd\") pod \"ceilometer-0\" (UID: \"a61be49f-b67b-4cd9-8790-12fe7dfde50b\") " pod="openstack/ceilometer-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.154516 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a61be49f-b67b-4cd9-8790-12fe7dfde50b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a61be49f-b67b-4cd9-8790-12fe7dfde50b\") " pod="openstack/ceilometer-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.154541 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-httpd\" (UniqueName: 
\"kubernetes.io/empty-dir/a61be49f-b67b-4cd9-8790-12fe7dfde50b-log-httpd\") pod \"ceilometer-0\" (UID: \"a61be49f-b67b-4cd9-8790-12fe7dfde50b\") " pod="openstack/ceilometer-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.154619 4961 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/c7a59662-9529-4704-89fd-1e74c46f7780-logs\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.154630 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c7a59662-9529-4704-89fd-1e74c46f7780-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.154639 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wtnbp\" (UniqueName: \"kubernetes.io/projected/c7a59662-9529-4704-89fd-1e74c46f7780-kube-api-access-wtnbp\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.154648 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c7a59662-9529-4704-89fd-1e74c46f7780-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.164692 4961 scope.go:117] "RemoveContainer" containerID="fcd194b9277bcd204f581db45e6b17d1dae4fd22f5d1e963b2ee902dd0a4ecf4" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.203609 4961 scope.go:117] "RemoveContainer" containerID="a566ebd7f84435e6a43ec333a0914b9125caf103cec2cef556aa82de14591413" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.225086 4961 scope.go:117] "RemoveContainer" containerID="c600a3141ab303631bbfe94db4d5936183d957854461c530ee41a34950004283" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.245586 4961 scope.go:117] "RemoveContainer" containerID="a00ccd2179bc8ab6a4fe1664e479bb1bf25f1c3aaccbc27178867afa4f271240" Dec 05 17:54:04 crc kubenswrapper[4961]: E1205 17:54:04.246099 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a00ccd2179bc8ab6a4fe1664e479bb1bf25f1c3aaccbc27178867afa4f271240\": container with ID starting with a00ccd2179bc8ab6a4fe1664e479bb1bf25f1c3aaccbc27178867afa4f271240 not found: ID does not exist" containerID="a00ccd2179bc8ab6a4fe1664e479bb1bf25f1c3aaccbc27178867afa4f271240" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.246128 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a00ccd2179bc8ab6a4fe1664e479bb1bf25f1c3aaccbc27178867afa4f271240"} err="failed to get container status \"a00ccd2179bc8ab6a4fe1664e479bb1bf25f1c3aaccbc27178867afa4f271240\": rpc error: code = NotFound desc = could not find container \"a00ccd2179bc8ab6a4fe1664e479bb1bf25f1c3aaccbc27178867afa4f271240\": container with ID starting with a00ccd2179bc8ab6a4fe1664e479bb1bf25f1c3aaccbc27178867afa4f271240 not found: ID does not exist" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.246148 4961 scope.go:117] "RemoveContainer" containerID="fcd194b9277bcd204f581db45e6b17d1dae4fd22f5d1e963b2ee902dd0a4ecf4" Dec 05 17:54:04 crc kubenswrapper[4961]: E1205 17:54:04.246578 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fcd194b9277bcd204f581db45e6b17d1dae4fd22f5d1e963b2ee902dd0a4ecf4\": container with ID starting with fcd194b9277bcd204f581db45e6b17d1dae4fd22f5d1e963b2ee902dd0a4ecf4 not 
found: ID does not exist" containerID="fcd194b9277bcd204f581db45e6b17d1dae4fd22f5d1e963b2ee902dd0a4ecf4" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.246601 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fcd194b9277bcd204f581db45e6b17d1dae4fd22f5d1e963b2ee902dd0a4ecf4"} err="failed to get container status \"fcd194b9277bcd204f581db45e6b17d1dae4fd22f5d1e963b2ee902dd0a4ecf4\": rpc error: code = NotFound desc = could not find container \"fcd194b9277bcd204f581db45e6b17d1dae4fd22f5d1e963b2ee902dd0a4ecf4\": container with ID starting with fcd194b9277bcd204f581db45e6b17d1dae4fd22f5d1e963b2ee902dd0a4ecf4 not found: ID does not exist" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.246613 4961 scope.go:117] "RemoveContainer" containerID="a566ebd7f84435e6a43ec333a0914b9125caf103cec2cef556aa82de14591413" Dec 05 17:54:04 crc kubenswrapper[4961]: E1205 17:54:04.246852 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a566ebd7f84435e6a43ec333a0914b9125caf103cec2cef556aa82de14591413\": container with ID starting with a566ebd7f84435e6a43ec333a0914b9125caf103cec2cef556aa82de14591413 not found: ID does not exist" containerID="a566ebd7f84435e6a43ec333a0914b9125caf103cec2cef556aa82de14591413" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.246872 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a566ebd7f84435e6a43ec333a0914b9125caf103cec2cef556aa82de14591413"} err="failed to get container status \"a566ebd7f84435e6a43ec333a0914b9125caf103cec2cef556aa82de14591413\": rpc error: code = NotFound desc = could not find container \"a566ebd7f84435e6a43ec333a0914b9125caf103cec2cef556aa82de14591413\": container with ID starting with a566ebd7f84435e6a43ec333a0914b9125caf103cec2cef556aa82de14591413 not found: ID does not exist" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.246889 4961 scope.go:117] "RemoveContainer" containerID="c600a3141ab303631bbfe94db4d5936183d957854461c530ee41a34950004283" Dec 05 17:54:04 crc kubenswrapper[4961]: E1205 17:54:04.247143 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c600a3141ab303631bbfe94db4d5936183d957854461c530ee41a34950004283\": container with ID starting with c600a3141ab303631bbfe94db4d5936183d957854461c530ee41a34950004283 not found: ID does not exist" containerID="c600a3141ab303631bbfe94db4d5936183d957854461c530ee41a34950004283" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.247164 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c600a3141ab303631bbfe94db4d5936183d957854461c530ee41a34950004283"} err="failed to get container status \"c600a3141ab303631bbfe94db4d5936183d957854461c530ee41a34950004283\": rpc error: code = NotFound desc = could not find container \"c600a3141ab303631bbfe94db4d5936183d957854461c530ee41a34950004283\": container with ID starting with c600a3141ab303631bbfe94db4d5936183d957854461c530ee41a34950004283 not found: ID does not exist" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.258403 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a61be49f-b67b-4cd9-8790-12fe7dfde50b-run-httpd\") pod \"ceilometer-0\" (UID: \"a61be49f-b67b-4cd9-8790-12fe7dfde50b\") " pod="openstack/ceilometer-0" Dec 05 17:54:04 crc 
kubenswrapper[4961]: I1205 17:54:04.258444 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a61be49f-b67b-4cd9-8790-12fe7dfde50b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a61be49f-b67b-4cd9-8790-12fe7dfde50b\") " pod="openstack/ceilometer-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.258487 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a61be49f-b67b-4cd9-8790-12fe7dfde50b-log-httpd\") pod \"ceilometer-0\" (UID: \"a61be49f-b67b-4cd9-8790-12fe7dfde50b\") " pod="openstack/ceilometer-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.258587 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b4tcn\" (UniqueName: \"kubernetes.io/projected/a61be49f-b67b-4cd9-8790-12fe7dfde50b-kube-api-access-b4tcn\") pod \"ceilometer-0\" (UID: \"a61be49f-b67b-4cd9-8790-12fe7dfde50b\") " pod="openstack/ceilometer-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.258666 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a61be49f-b67b-4cd9-8790-12fe7dfde50b-scripts\") pod \"ceilometer-0\" (UID: \"a61be49f-b67b-4cd9-8790-12fe7dfde50b\") " pod="openstack/ceilometer-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.258701 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a61be49f-b67b-4cd9-8790-12fe7dfde50b-config-data\") pod \"ceilometer-0\" (UID: \"a61be49f-b67b-4cd9-8790-12fe7dfde50b\") " pod="openstack/ceilometer-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.258729 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a61be49f-b67b-4cd9-8790-12fe7dfde50b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a61be49f-b67b-4cd9-8790-12fe7dfde50b\") " pod="openstack/ceilometer-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.258753 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a61be49f-b67b-4cd9-8790-12fe7dfde50b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a61be49f-b67b-4cd9-8790-12fe7dfde50b\") " pod="openstack/ceilometer-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.259357 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a61be49f-b67b-4cd9-8790-12fe7dfde50b-run-httpd\") pod \"ceilometer-0\" (UID: \"a61be49f-b67b-4cd9-8790-12fe7dfde50b\") " pod="openstack/ceilometer-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.259419 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-httpd\" (UniqueName: \"kubernetes.io/empty-dir/a61be49f-b67b-4cd9-8790-12fe7dfde50b-log-httpd\") pod \"ceilometer-0\" (UID: \"a61be49f-b67b-4cd9-8790-12fe7dfde50b\") " pod="openstack/ceilometer-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.262716 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-tls-certs\" (UniqueName: \"kubernetes.io/secret/a61be49f-b67b-4cd9-8790-12fe7dfde50b-ceilometer-tls-certs\") pod \"ceilometer-0\" (UID: \"a61be49f-b67b-4cd9-8790-12fe7dfde50b\") " pod="openstack/ceilometer-0" Dec 05 17:54:04 crc kubenswrapper[4961]: 
I1205 17:54:04.262839 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/a61be49f-b67b-4cd9-8790-12fe7dfde50b-combined-ca-bundle\") pod \"ceilometer-0\" (UID: \"a61be49f-b67b-4cd9-8790-12fe7dfde50b\") " pod="openstack/ceilometer-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.262973 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a61be49f-b67b-4cd9-8790-12fe7dfde50b-scripts\") pod \"ceilometer-0\" (UID: \"a61be49f-b67b-4cd9-8790-12fe7dfde50b\") " pod="openstack/ceilometer-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.263076 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"sg-core-conf-yaml\" (UniqueName: \"kubernetes.io/secret/a61be49f-b67b-4cd9-8790-12fe7dfde50b-sg-core-conf-yaml\") pod \"ceilometer-0\" (UID: \"a61be49f-b67b-4cd9-8790-12fe7dfde50b\") " pod="openstack/ceilometer-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.264904 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a61be49f-b67b-4cd9-8790-12fe7dfde50b-config-data\") pod \"ceilometer-0\" (UID: \"a61be49f-b67b-4cd9-8790-12fe7dfde50b\") " pod="openstack/ceilometer-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.279545 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b4tcn\" (UniqueName: \"kubernetes.io/projected/a61be49f-b67b-4cd9-8790-12fe7dfde50b-kube-api-access-b4tcn\") pod \"ceilometer-0\" (UID: \"a61be49f-b67b-4cd9-8790-12fe7dfde50b\") " pod="openstack/ceilometer-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.377356 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.389882 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.403556 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"] Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.405307 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.407995 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.409043 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.409101 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.412166 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.460461 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ceilometer-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.461660 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed08e008-fa51-488f-86dd-d9b81d1572f5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ed08e008-fa51-488f-86dd-d9b81d1572f5\") " pod="openstack/nova-api-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.461696 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed08e008-fa51-488f-86dd-d9b81d1572f5-public-tls-certs\") pod \"nova-api-0\" (UID: \"ed08e008-fa51-488f-86dd-d9b81d1572f5\") " pod="openstack/nova-api-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.461806 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sth2h\" (UniqueName: \"kubernetes.io/projected/ed08e008-fa51-488f-86dd-d9b81d1572f5-kube-api-access-sth2h\") pod \"nova-api-0\" (UID: \"ed08e008-fa51-488f-86dd-d9b81d1572f5\") " pod="openstack/nova-api-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.461830 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed08e008-fa51-488f-86dd-d9b81d1572f5-config-data\") pod \"nova-api-0\" (UID: \"ed08e008-fa51-488f-86dd-d9b81d1572f5\") " pod="openstack/nova-api-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.461858 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed08e008-fa51-488f-86dd-d9b81d1572f5-logs\") pod \"nova-api-0\" (UID: \"ed08e008-fa51-488f-86dd-d9b81d1572f5\") " pod="openstack/nova-api-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.461882 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed08e008-fa51-488f-86dd-d9b81d1572f5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"ed08e008-fa51-488f-86dd-d9b81d1572f5\") " pod="openstack/nova-api-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.550086 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.564410 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed08e008-fa51-488f-86dd-d9b81d1572f5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ed08e008-fa51-488f-86dd-d9b81d1572f5\") " pod="openstack/nova-api-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.566340 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed08e008-fa51-488f-86dd-d9b81d1572f5-public-tls-certs\") pod \"nova-api-0\" (UID: \"ed08e008-fa51-488f-86dd-d9b81d1572f5\") " pod="openstack/nova-api-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.566551 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sth2h\" (UniqueName: \"kubernetes.io/projected/ed08e008-fa51-488f-86dd-d9b81d1572f5-kube-api-access-sth2h\") pod \"nova-api-0\" (UID: \"ed08e008-fa51-488f-86dd-d9b81d1572f5\") " pod="openstack/nova-api-0" Dec 05 
17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.566677 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed08e008-fa51-488f-86dd-d9b81d1572f5-config-data\") pod \"nova-api-0\" (UID: \"ed08e008-fa51-488f-86dd-d9b81d1572f5\") " pod="openstack/nova-api-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.566697 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed08e008-fa51-488f-86dd-d9b81d1572f5-logs\") pod \"nova-api-0\" (UID: \"ed08e008-fa51-488f-86dd-d9b81d1572f5\") " pod="openstack/nova-api-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.566746 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed08e008-fa51-488f-86dd-d9b81d1572f5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"ed08e008-fa51-488f-86dd-d9b81d1572f5\") " pod="openstack/nova-api-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.571607 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed08e008-fa51-488f-86dd-d9b81d1572f5-logs\") pod \"nova-api-0\" (UID: \"ed08e008-fa51-488f-86dd-d9b81d1572f5\") " pod="openstack/nova-api-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.572281 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed08e008-fa51-488f-86dd-d9b81d1572f5-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"ed08e008-fa51-488f-86dd-d9b81d1572f5\") " pod="openstack/nova-api-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.574992 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed08e008-fa51-488f-86dd-d9b81d1572f5-internal-tls-certs\") pod \"nova-api-0\" (UID: \"ed08e008-fa51-488f-86dd-d9b81d1572f5\") " pod="openstack/nova-api-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.575180 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed08e008-fa51-488f-86dd-d9b81d1572f5-config-data\") pod \"nova-api-0\" (UID: \"ed08e008-fa51-488f-86dd-d9b81d1572f5\") " pod="openstack/nova-api-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.577257 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed08e008-fa51-488f-86dd-d9b81d1572f5-public-tls-certs\") pod \"nova-api-0\" (UID: \"ed08e008-fa51-488f-86dd-d9b81d1572f5\") " pod="openstack/nova-api-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.586178 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.588060 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sth2h\" (UniqueName: \"kubernetes.io/projected/ed08e008-fa51-488f-86dd-d9b81d1572f5-kube-api-access-sth2h\") pod \"nova-api-0\" (UID: \"ed08e008-fa51-488f-86dd-d9b81d1572f5\") " pod="openstack/nova-api-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.766327 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-api-0" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.881699 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa726d06-fb2f-4abf-bdb9-4e22bc78a400" path="/var/lib/kubelet/pods/aa726d06-fb2f-4abf-bdb9-4e22bc78a400/volumes" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.882594 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7a59662-9529-4704-89fd-1e74c46f7780" path="/var/lib/kubelet/pods/c7a59662-9529-4704-89fd-1e74c46f7780/volumes" Dec 05 17:54:04 crc kubenswrapper[4961]: I1205 17:54:04.924206 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ceilometer-0"] Dec 05 17:54:05 crc kubenswrapper[4961]: I1205 17:54:05.059894 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a61be49f-b67b-4cd9-8790-12fe7dfde50b","Type":"ContainerStarted","Data":"3c04fb6a5b277ba31e1a7f327d7cd1cfe3a502a0002b1630eb5108800d9e3a05"} Dec 05 17:54:05 crc kubenswrapper[4961]: I1205 17:54:05.077411 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-cell1-novncproxy-0" Dec 05 17:54:05 crc kubenswrapper[4961]: I1205 17:54:05.224043 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:54:05 crc kubenswrapper[4961]: W1205 17:54:05.233491 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poded08e008_fa51_488f_86dd_d9b81d1572f5.slice/crio-7c1307b74782ff5f41eb0a024856067c7c4c1bf1357e751ba0bb892e83adc16a WatchSource:0}: Error finding container 7c1307b74782ff5f41eb0a024856067c7c4c1bf1357e751ba0bb892e83adc16a: Status 404 returned error can't find the container with id 7c1307b74782ff5f41eb0a024856067c7c4c1bf1357e751ba0bb892e83adc16a Dec 05 17:54:05 crc kubenswrapper[4961]: I1205 17:54:05.290752 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-cell1-cell-mapping-rkrgs"] Dec 05 17:54:05 crc kubenswrapper[4961]: I1205 17:54:05.293123 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rkrgs" Dec 05 17:54:05 crc kubenswrapper[4961]: I1205 17:54:05.297111 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-scripts" Dec 05 17:54:05 crc kubenswrapper[4961]: I1205 17:54:05.297316 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-manage-config-data" Dec 05 17:54:05 crc kubenswrapper[4961]: I1205 17:54:05.302705 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-rkrgs"] Dec 05 17:54:05 crc kubenswrapper[4961]: I1205 17:54:05.383751 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5656c8dd-d36b-48ae-a272-01f789cf280d-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-rkrgs\" (UID: \"5656c8dd-d36b-48ae-a272-01f789cf280d\") " pod="openstack/nova-cell1-cell-mapping-rkrgs" Dec 05 17:54:05 crc kubenswrapper[4961]: I1205 17:54:05.383996 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5656c8dd-d36b-48ae-a272-01f789cf280d-scripts\") pod \"nova-cell1-cell-mapping-rkrgs\" (UID: \"5656c8dd-d36b-48ae-a272-01f789cf280d\") " pod="openstack/nova-cell1-cell-mapping-rkrgs" Dec 05 17:54:05 crc kubenswrapper[4961]: I1205 17:54:05.384229 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5656c8dd-d36b-48ae-a272-01f789cf280d-config-data\") pod \"nova-cell1-cell-mapping-rkrgs\" (UID: \"5656c8dd-d36b-48ae-a272-01f789cf280d\") " pod="openstack/nova-cell1-cell-mapping-rkrgs" Dec 05 17:54:05 crc kubenswrapper[4961]: I1205 17:54:05.384386 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q8t2r\" (UniqueName: \"kubernetes.io/projected/5656c8dd-d36b-48ae-a272-01f789cf280d-kube-api-access-q8t2r\") pod \"nova-cell1-cell-mapping-rkrgs\" (UID: \"5656c8dd-d36b-48ae-a272-01f789cf280d\") " pod="openstack/nova-cell1-cell-mapping-rkrgs" Dec 05 17:54:05 crc kubenswrapper[4961]: I1205 17:54:05.486219 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q8t2r\" (UniqueName: \"kubernetes.io/projected/5656c8dd-d36b-48ae-a272-01f789cf280d-kube-api-access-q8t2r\") pod \"nova-cell1-cell-mapping-rkrgs\" (UID: \"5656c8dd-d36b-48ae-a272-01f789cf280d\") " pod="openstack/nova-cell1-cell-mapping-rkrgs" Dec 05 17:54:05 crc kubenswrapper[4961]: I1205 17:54:05.486308 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5656c8dd-d36b-48ae-a272-01f789cf280d-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-rkrgs\" (UID: \"5656c8dd-d36b-48ae-a272-01f789cf280d\") " pod="openstack/nova-cell1-cell-mapping-rkrgs" Dec 05 17:54:05 crc kubenswrapper[4961]: I1205 17:54:05.486338 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5656c8dd-d36b-48ae-a272-01f789cf280d-scripts\") pod \"nova-cell1-cell-mapping-rkrgs\" (UID: \"5656c8dd-d36b-48ae-a272-01f789cf280d\") " pod="openstack/nova-cell1-cell-mapping-rkrgs" Dec 05 17:54:05 crc kubenswrapper[4961]: I1205 17:54:05.486486 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" 
(UniqueName: \"kubernetes.io/secret/5656c8dd-d36b-48ae-a272-01f789cf280d-config-data\") pod \"nova-cell1-cell-mapping-rkrgs\" (UID: \"5656c8dd-d36b-48ae-a272-01f789cf280d\") " pod="openstack/nova-cell1-cell-mapping-rkrgs" Dec 05 17:54:05 crc kubenswrapper[4961]: I1205 17:54:05.492879 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5656c8dd-d36b-48ae-a272-01f789cf280d-combined-ca-bundle\") pod \"nova-cell1-cell-mapping-rkrgs\" (UID: \"5656c8dd-d36b-48ae-a272-01f789cf280d\") " pod="openstack/nova-cell1-cell-mapping-rkrgs" Dec 05 17:54:05 crc kubenswrapper[4961]: I1205 17:54:05.492915 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5656c8dd-d36b-48ae-a272-01f789cf280d-config-data\") pod \"nova-cell1-cell-mapping-rkrgs\" (UID: \"5656c8dd-d36b-48ae-a272-01f789cf280d\") " pod="openstack/nova-cell1-cell-mapping-rkrgs" Dec 05 17:54:05 crc kubenswrapper[4961]: I1205 17:54:05.494009 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5656c8dd-d36b-48ae-a272-01f789cf280d-scripts\") pod \"nova-cell1-cell-mapping-rkrgs\" (UID: \"5656c8dd-d36b-48ae-a272-01f789cf280d\") " pod="openstack/nova-cell1-cell-mapping-rkrgs" Dec 05 17:54:05 crc kubenswrapper[4961]: I1205 17:54:05.506446 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q8t2r\" (UniqueName: \"kubernetes.io/projected/5656c8dd-d36b-48ae-a272-01f789cf280d-kube-api-access-q8t2r\") pod \"nova-cell1-cell-mapping-rkrgs\" (UID: \"5656c8dd-d36b-48ae-a272-01f789cf280d\") " pod="openstack/nova-cell1-cell-mapping-rkrgs" Dec 05 17:54:05 crc kubenswrapper[4961]: I1205 17:54:05.779818 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rkrgs" Dec 05 17:54:06 crc kubenswrapper[4961]: I1205 17:54:06.077060 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a61be49f-b67b-4cd9-8790-12fe7dfde50b","Type":"ContainerStarted","Data":"ebe6e6f577b56689254795e826310b4a6ad19ba60ad3406d60d5575bfb704804"} Dec 05 17:54:06 crc kubenswrapper[4961]: I1205 17:54:06.080426 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ed08e008-fa51-488f-86dd-d9b81d1572f5","Type":"ContainerStarted","Data":"7fe66960f46993a8209137c33fda5c13cef5f72a3055bb08cdffcea76b317a78"} Dec 05 17:54:06 crc kubenswrapper[4961]: I1205 17:54:06.080471 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ed08e008-fa51-488f-86dd-d9b81d1572f5","Type":"ContainerStarted","Data":"943af21f4f794f9f3bfe775c37f219fa600ff5e6fddd43aa966ae3e8ba58d987"} Dec 05 17:54:06 crc kubenswrapper[4961]: I1205 17:54:06.080486 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ed08e008-fa51-488f-86dd-d9b81d1572f5","Type":"ContainerStarted","Data":"7c1307b74782ff5f41eb0a024856067c7c4c1bf1357e751ba0bb892e83adc16a"} Dec 05 17:54:06 crc kubenswrapper[4961]: I1205 17:54:06.113510 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.113491692 podStartE2EDuration="2.113491692s" podCreationTimestamp="2025-12-05 17:54:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:54:06.101376251 +0000 UTC m=+1252.162526744" watchObservedRunningTime="2025-12-05 17:54:06.113491692 +0000 UTC m=+1252.174642165" Dec 05 17:54:06 crc kubenswrapper[4961]: W1205 17:54:06.231085 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5656c8dd_d36b_48ae_a272_01f789cf280d.slice/crio-4a322e66ebde2cdac5c97aaf625096b61d0f839a02da4a4245a53807e87554f6 WatchSource:0}: Error finding container 4a322e66ebde2cdac5c97aaf625096b61d0f839a02da4a4245a53807e87554f6: Status 404 returned error can't find the container with id 4a322e66ebde2cdac5c97aaf625096b61d0f839a02da4a4245a53807e87554f6 Dec 05 17:54:06 crc kubenswrapper[4961]: I1205 17:54:06.233378 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-cell1-cell-mapping-rkrgs"] Dec 05 17:54:07 crc kubenswrapper[4961]: I1205 17:54:07.091471 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a61be49f-b67b-4cd9-8790-12fe7dfde50b","Type":"ContainerStarted","Data":"c1f8805560e9c9af4a10ab1ca16ca66bd838e017eb26760471f617063dc680bd"} Dec 05 17:54:07 crc kubenswrapper[4961]: I1205 17:54:07.093638 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rkrgs" event={"ID":"5656c8dd-d36b-48ae-a272-01f789cf280d","Type":"ContainerStarted","Data":"513dfaf8e21e3fdf52b17d1c804c70d66940f31d4d712bebaa85e068d0aa34ba"} Dec 05 17:54:07 crc kubenswrapper[4961]: I1205 17:54:07.093679 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rkrgs" event={"ID":"5656c8dd-d36b-48ae-a272-01f789cf280d","Type":"ContainerStarted","Data":"4a322e66ebde2cdac5c97aaf625096b61d0f839a02da4a4245a53807e87554f6"} Dec 05 17:54:07 crc kubenswrapper[4961]: I1205 17:54:07.112670 4961 pod_startup_latency_tracker.go:104] 
"Observed pod startup duration" pod="openstack/nova-cell1-cell-mapping-rkrgs" podStartSLOduration=2.112618596 podStartE2EDuration="2.112618596s" podCreationTimestamp="2025-12-05 17:54:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:54:07.110448732 +0000 UTC m=+1253.171599225" watchObservedRunningTime="2025-12-05 17:54:07.112618596 +0000 UTC m=+1253.173769069" Dec 05 17:54:07 crc kubenswrapper[4961]: I1205 17:54:07.547681 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv" Dec 05 17:54:07 crc kubenswrapper[4961]: I1205 17:54:07.642488 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-wcz6s"] Dec 05 17:54:07 crc kubenswrapper[4961]: I1205 17:54:07.642713 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" podUID="6dfc20b2-940f-46c8-bcaa-e9e209f92bed" containerName="dnsmasq-dns" containerID="cri-o://d8fc8ca51d295bba0225ad203c4bd58dadcc4898088dc29267e35c25fd830d0f" gracePeriod=10 Dec 05 17:54:08 crc kubenswrapper[4961]: I1205 17:54:08.127262 4961 generic.go:334] "Generic (PLEG): container finished" podID="6dfc20b2-940f-46c8-bcaa-e9e209f92bed" containerID="d8fc8ca51d295bba0225ad203c4bd58dadcc4898088dc29267e35c25fd830d0f" exitCode=0 Dec 05 17:54:08 crc kubenswrapper[4961]: I1205 17:54:08.127311 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" event={"ID":"6dfc20b2-940f-46c8-bcaa-e9e209f92bed","Type":"ContainerDied","Data":"d8fc8ca51d295bba0225ad203c4bd58dadcc4898088dc29267e35c25fd830d0f"} Dec 05 17:54:08 crc kubenswrapper[4961]: I1205 17:54:08.134985 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a61be49f-b67b-4cd9-8790-12fe7dfde50b","Type":"ContainerStarted","Data":"9aa1b27ad1883acde2c9f9079542ecc18df12e04c9bb62877c62607609573d70"} Dec 05 17:54:08 crc kubenswrapper[4961]: I1205 17:54:08.331976 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" Dec 05 17:54:08 crc kubenswrapper[4961]: I1205 17:54:08.479190 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-config\") pod \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\" (UID: \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\") " Dec 05 17:54:08 crc kubenswrapper[4961]: I1205 17:54:08.479380 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xn9j4\" (UniqueName: \"kubernetes.io/projected/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-kube-api-access-xn9j4\") pod \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\" (UID: \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\") " Dec 05 17:54:08 crc kubenswrapper[4961]: I1205 17:54:08.479498 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-dns-svc\") pod \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\" (UID: \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\") " Dec 05 17:54:08 crc kubenswrapper[4961]: I1205 17:54:08.479721 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-dns-swift-storage-0\") pod \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\" (UID: \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\") " Dec 05 17:54:08 crc kubenswrapper[4961]: I1205 17:54:08.480154 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-ovsdbserver-nb\") pod \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\" (UID: \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\") " Dec 05 17:54:08 crc kubenswrapper[4961]: I1205 17:54:08.480293 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-ovsdbserver-sb\") pod \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\" (UID: \"6dfc20b2-940f-46c8-bcaa-e9e209f92bed\") " Dec 05 17:54:08 crc kubenswrapper[4961]: I1205 17:54:08.485900 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-kube-api-access-xn9j4" (OuterVolumeSpecName: "kube-api-access-xn9j4") pod "6dfc20b2-940f-46c8-bcaa-e9e209f92bed" (UID: "6dfc20b2-940f-46c8-bcaa-e9e209f92bed"). InnerVolumeSpecName "kube-api-access-xn9j4". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:54:08 crc kubenswrapper[4961]: I1205 17:54:08.549204 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "6dfc20b2-940f-46c8-bcaa-e9e209f92bed" (UID: "6dfc20b2-940f-46c8-bcaa-e9e209f92bed"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:54:08 crc kubenswrapper[4961]: I1205 17:54:08.549527 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "6dfc20b2-940f-46c8-bcaa-e9e209f92bed" (UID: "6dfc20b2-940f-46c8-bcaa-e9e209f92bed"). InnerVolumeSpecName "ovsdbserver-sb". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:54:08 crc kubenswrapper[4961]: I1205 17:54:08.552430 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-config" (OuterVolumeSpecName: "config") pod "6dfc20b2-940f-46c8-bcaa-e9e209f92bed" (UID: "6dfc20b2-940f-46c8-bcaa-e9e209f92bed"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:54:08 crc kubenswrapper[4961]: I1205 17:54:08.567456 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "6dfc20b2-940f-46c8-bcaa-e9e209f92bed" (UID: "6dfc20b2-940f-46c8-bcaa-e9e209f92bed"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:54:08 crc kubenswrapper[4961]: I1205 17:54:08.568301 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "6dfc20b2-940f-46c8-bcaa-e9e209f92bed" (UID: "6dfc20b2-940f-46c8-bcaa-e9e209f92bed"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 17:54:08 crc kubenswrapper[4961]: I1205 17:54:08.583512 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xn9j4\" (UniqueName: \"kubernetes.io/projected/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-kube-api-access-xn9j4\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:08 crc kubenswrapper[4961]: I1205 17:54:08.583561 4961 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-dns-svc\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:08 crc kubenswrapper[4961]: I1205 17:54:08.583573 4961 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-dns-swift-storage-0\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:08 crc kubenswrapper[4961]: I1205 17:54:08.583584 4961 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-ovsdbserver-nb\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:08 crc kubenswrapper[4961]: I1205 17:54:08.583595 4961 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-ovsdbserver-sb\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:08 crc kubenswrapper[4961]: I1205 17:54:08.583606 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6dfc20b2-940f-46c8-bcaa-e9e209f92bed-config\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:09 crc kubenswrapper[4961]: I1205 17:54:09.150232 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ceilometer-0" event={"ID":"a61be49f-b67b-4cd9-8790-12fe7dfde50b","Type":"ContainerStarted","Data":"f03459c3de587728cd88d599423da18180490656982b02cb1bab3110932256f4"} Dec 05 17:54:09 crc kubenswrapper[4961]: I1205 17:54:09.151302 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/ceilometer-0" Dec 05 17:54:09 crc kubenswrapper[4961]: I1205 17:54:09.153006 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" event={"ID":"6dfc20b2-940f-46c8-bcaa-e9e209f92bed","Type":"ContainerDied","Data":"46cfc46aacd5832b80510d97b46251ed0304f0d4ffb3bdd7d9ff668f529ccac1"} Dec 05 17:54:09 crc kubenswrapper[4961]: I1205 17:54:09.153045 4961 scope.go:117] "RemoveContainer" containerID="d8fc8ca51d295bba0225ad203c4bd58dadcc4898088dc29267e35c25fd830d0f" Dec 05 17:54:09 crc kubenswrapper[4961]: I1205 17:54:09.153173 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-865f5d856f-wcz6s" Dec 05 17:54:09 crc kubenswrapper[4961]: I1205 17:54:09.186783 4961 scope.go:117] "RemoveContainer" containerID="34d06768645ab6c9b378589d84f52d001f23974059838a58d9262de6040a8d6d" Dec 05 17:54:09 crc kubenswrapper[4961]: I1205 17:54:09.198133 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ceilometer-0" podStartSLOduration=2.086950393 podStartE2EDuration="5.198113661s" podCreationTimestamp="2025-12-05 17:54:04 +0000 UTC" firstStartedPulling="2025-12-05 17:54:04.932698798 +0000 UTC m=+1250.993849271" lastFinishedPulling="2025-12-05 17:54:08.043862066 +0000 UTC m=+1254.105012539" observedRunningTime="2025-12-05 17:54:09.181937189 +0000 UTC m=+1255.243087662" watchObservedRunningTime="2025-12-05 17:54:09.198113661 +0000 UTC m=+1255.259264134" Dec 05 17:54:09 crc kubenswrapper[4961]: I1205 17:54:09.223958 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-wcz6s"] Dec 05 17:54:09 crc kubenswrapper[4961]: I1205 17:54:09.232156 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-865f5d856f-wcz6s"] Dec 05 17:54:10 crc kubenswrapper[4961]: I1205 17:54:10.887107 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6dfc20b2-940f-46c8-bcaa-e9e209f92bed" path="/var/lib/kubelet/pods/6dfc20b2-940f-46c8-bcaa-e9e209f92bed/volumes" Dec 05 17:54:13 crc kubenswrapper[4961]: I1205 17:54:13.230838 4961 generic.go:334] "Generic (PLEG): container finished" podID="5656c8dd-d36b-48ae-a272-01f789cf280d" containerID="513dfaf8e21e3fdf52b17d1c804c70d66940f31d4d712bebaa85e068d0aa34ba" exitCode=0 Dec 05 17:54:13 crc kubenswrapper[4961]: I1205 17:54:13.231191 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rkrgs" event={"ID":"5656c8dd-d36b-48ae-a272-01f789cf280d","Type":"ContainerDied","Data":"513dfaf8e21e3fdf52b17d1c804c70d66940f31d4d712bebaa85e068d0aa34ba"} Dec 05 17:54:14 crc kubenswrapper[4961]: I1205 17:54:14.620649 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rkrgs" Dec 05 17:54:14 crc kubenswrapper[4961]: I1205 17:54:14.701904 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5656c8dd-d36b-48ae-a272-01f789cf280d-combined-ca-bundle\") pod \"5656c8dd-d36b-48ae-a272-01f789cf280d\" (UID: \"5656c8dd-d36b-48ae-a272-01f789cf280d\") " Dec 05 17:54:14 crc kubenswrapper[4961]: I1205 17:54:14.702492 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5656c8dd-d36b-48ae-a272-01f789cf280d-config-data\") pod \"5656c8dd-d36b-48ae-a272-01f789cf280d\" (UID: \"5656c8dd-d36b-48ae-a272-01f789cf280d\") " Dec 05 17:54:14 crc kubenswrapper[4961]: I1205 17:54:14.702574 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5656c8dd-d36b-48ae-a272-01f789cf280d-scripts\") pod \"5656c8dd-d36b-48ae-a272-01f789cf280d\" (UID: \"5656c8dd-d36b-48ae-a272-01f789cf280d\") " Dec 05 17:54:14 crc kubenswrapper[4961]: I1205 17:54:14.702724 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q8t2r\" (UniqueName: \"kubernetes.io/projected/5656c8dd-d36b-48ae-a272-01f789cf280d-kube-api-access-q8t2r\") pod \"5656c8dd-d36b-48ae-a272-01f789cf280d\" (UID: \"5656c8dd-d36b-48ae-a272-01f789cf280d\") " Dec 05 17:54:14 crc kubenswrapper[4961]: I1205 17:54:14.711210 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5656c8dd-d36b-48ae-a272-01f789cf280d-kube-api-access-q8t2r" (OuterVolumeSpecName: "kube-api-access-q8t2r") pod "5656c8dd-d36b-48ae-a272-01f789cf280d" (UID: "5656c8dd-d36b-48ae-a272-01f789cf280d"). InnerVolumeSpecName "kube-api-access-q8t2r". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:54:14 crc kubenswrapper[4961]: I1205 17:54:14.721661 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5656c8dd-d36b-48ae-a272-01f789cf280d-scripts" (OuterVolumeSpecName: "scripts") pod "5656c8dd-d36b-48ae-a272-01f789cf280d" (UID: "5656c8dd-d36b-48ae-a272-01f789cf280d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:54:14 crc kubenswrapper[4961]: I1205 17:54:14.735804 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5656c8dd-d36b-48ae-a272-01f789cf280d-config-data" (OuterVolumeSpecName: "config-data") pod "5656c8dd-d36b-48ae-a272-01f789cf280d" (UID: "5656c8dd-d36b-48ae-a272-01f789cf280d"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:54:14 crc kubenswrapper[4961]: I1205 17:54:14.739013 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5656c8dd-d36b-48ae-a272-01f789cf280d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "5656c8dd-d36b-48ae-a272-01f789cf280d" (UID: "5656c8dd-d36b-48ae-a272-01f789cf280d"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:54:14 crc kubenswrapper[4961]: I1205 17:54:14.767285 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 17:54:14 crc kubenswrapper[4961]: I1205 17:54:14.767332 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0" Dec 05 17:54:14 crc kubenswrapper[4961]: I1205 17:54:14.805320 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/5656c8dd-d36b-48ae-a272-01f789cf280d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:14 crc kubenswrapper[4961]: I1205 17:54:14.805593 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5656c8dd-d36b-48ae-a272-01f789cf280d-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:14 crc kubenswrapper[4961]: I1205 17:54:14.805670 4961 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5656c8dd-d36b-48ae-a272-01f789cf280d-scripts\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:14 crc kubenswrapper[4961]: I1205 17:54:14.805754 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q8t2r\" (UniqueName: \"kubernetes.io/projected/5656c8dd-d36b-48ae-a272-01f789cf280d-kube-api-access-q8t2r\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:15 crc kubenswrapper[4961]: I1205 17:54:15.254439 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-cell1-cell-mapping-rkrgs" event={"ID":"5656c8dd-d36b-48ae-a272-01f789cf280d","Type":"ContainerDied","Data":"4a322e66ebde2cdac5c97aaf625096b61d0f839a02da4a4245a53807e87554f6"} Dec 05 17:54:15 crc kubenswrapper[4961]: I1205 17:54:15.254485 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4a322e66ebde2cdac5c97aaf625096b61d0f839a02da4a4245a53807e87554f6" Dec 05 17:54:15 crc kubenswrapper[4961]: I1205 17:54:15.254494 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-cell1-cell-mapping-rkrgs" Dec 05 17:54:15 crc kubenswrapper[4961]: I1205 17:54:15.440916 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"] Dec 05 17:54:15 crc kubenswrapper[4961]: I1205 17:54:15.441537 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="ed08e008-fa51-488f-86dd-d9b81d1572f5" containerName="nova-api-api" containerID="cri-o://7fe66960f46993a8209137c33fda5c13cef5f72a3055bb08cdffcea76b317a78" gracePeriod=30 Dec 05 17:54:15 crc kubenswrapper[4961]: I1205 17:54:15.442047 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-api-0" podUID="ed08e008-fa51-488f-86dd-d9b81d1572f5" containerName="nova-api-log" containerID="cri-o://943af21f4f794f9f3bfe775c37f219fa600ff5e6fddd43aa966ae3e8ba58d987" gracePeriod=30 Dec 05 17:54:15 crc kubenswrapper[4961]: I1205 17:54:15.448560 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="ed08e008-fa51-488f-86dd-d9b81d1572f5" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.200:8774/\": EOF" Dec 05 17:54:15 crc kubenswrapper[4961]: I1205 17:54:15.448715 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="ed08e008-fa51-488f-86dd-d9b81d1572f5" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.200:8774/\": EOF" Dec 05 17:54:15 crc kubenswrapper[4961]: I1205 17:54:15.525188 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 17:54:15 crc kubenswrapper[4961]: I1205 17:54:15.526069 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-scheduler-0" podUID="c53690af-342f-487b-81f6-c9d9d03ba6c0" containerName="nova-scheduler-scheduler" containerID="cri-o://1613e0fb2a4af24d550997ec52461e44f2aa6ace1c5a2355e2b5a79ba5229c9e" gracePeriod=30 Dec 05 17:54:15 crc kubenswrapper[4961]: I1205 17:54:15.535959 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:54:15 crc kubenswrapper[4961]: I1205 17:54:15.536224 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="594e1bb5-120e-4b26-909d-b8913c43e670" containerName="nova-metadata-log" containerID="cri-o://2b2930f167730a4952215ebcfd2bb7a763e61f40e6b66ca6890daf37cc598931" gracePeriod=30 Dec 05 17:54:15 crc kubenswrapper[4961]: I1205 17:54:15.536556 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/nova-metadata-0" podUID="594e1bb5-120e-4b26-909d-b8913c43e670" containerName="nova-metadata-metadata" containerID="cri-o://7fdf1623579aa765dd33353a6378de5af3460e49ef2c8739d944a8c54cf0eca6" gracePeriod=30 Dec 05 17:54:16 crc kubenswrapper[4961]: I1205 17:54:16.266486 4961 generic.go:334] "Generic (PLEG): container finished" podID="594e1bb5-120e-4b26-909d-b8913c43e670" containerID="2b2930f167730a4952215ebcfd2bb7a763e61f40e6b66ca6890daf37cc598931" exitCode=143 Dec 05 17:54:16 crc kubenswrapper[4961]: I1205 17:54:16.266518 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"594e1bb5-120e-4b26-909d-b8913c43e670","Type":"ContainerDied","Data":"2b2930f167730a4952215ebcfd2bb7a763e61f40e6b66ca6890daf37cc598931"} Dec 05 17:54:16 crc kubenswrapper[4961]: I1205 17:54:16.269637 4961 generic.go:334] "Generic (PLEG): container finished" 
podID="ed08e008-fa51-488f-86dd-d9b81d1572f5" containerID="943af21f4f794f9f3bfe775c37f219fa600ff5e6fddd43aa966ae3e8ba58d987" exitCode=143 Dec 05 17:54:16 crc kubenswrapper[4961]: I1205 17:54:16.269671 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ed08e008-fa51-488f-86dd-d9b81d1572f5","Type":"ContainerDied","Data":"943af21f4f794f9f3bfe775c37f219fa600ff5e6fddd43aa966ae3e8ba58d987"} Dec 05 17:54:16 crc kubenswrapper[4961]: E1205 17:54:16.319901 4961 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1613e0fb2a4af24d550997ec52461e44f2aa6ace1c5a2355e2b5a79ba5229c9e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 17:54:16 crc kubenswrapper[4961]: E1205 17:54:16.321993 4961 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1613e0fb2a4af24d550997ec52461e44f2aa6ace1c5a2355e2b5a79ba5229c9e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 17:54:16 crc kubenswrapper[4961]: E1205 17:54:16.324312 4961 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="1613e0fb2a4af24d550997ec52461e44f2aa6ace1c5a2355e2b5a79ba5229c9e" cmd=["/usr/bin/pgrep","-r","DRST","nova-scheduler"] Dec 05 17:54:16 crc kubenswrapper[4961]: E1205 17:54:16.324351 4961 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openstack/nova-scheduler-0" podUID="c53690af-342f-487b-81f6-c9d9d03ba6c0" containerName="nova-scheduler-scheduler" Dec 05 17:54:18 crc kubenswrapper[4961]: I1205 17:54:18.688340 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="594e1bb5-120e-4b26-909d-b8913c43e670" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": read tcp 10.217.0.2:57326->10.217.0.191:8775: read: connection reset by peer" Dec 05 17:54:18 crc kubenswrapper[4961]: I1205 17:54:18.688366 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/nova-metadata-0" podUID="594e1bb5-120e-4b26-909d-b8913c43e670" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.191:8775/\": read tcp 10.217.0.2:57342->10.217.0.191:8775: read: connection reset by peer" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.152439 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.295692 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/594e1bb5-120e-4b26-909d-b8913c43e670-combined-ca-bundle\") pod \"594e1bb5-120e-4b26-909d-b8913c43e670\" (UID: \"594e1bb5-120e-4b26-909d-b8913c43e670\") " Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.295743 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sdztl\" (UniqueName: \"kubernetes.io/projected/594e1bb5-120e-4b26-909d-b8913c43e670-kube-api-access-sdztl\") pod \"594e1bb5-120e-4b26-909d-b8913c43e670\" (UID: \"594e1bb5-120e-4b26-909d-b8913c43e670\") " Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.295762 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/594e1bb5-120e-4b26-909d-b8913c43e670-nova-metadata-tls-certs\") pod \"594e1bb5-120e-4b26-909d-b8913c43e670\" (UID: \"594e1bb5-120e-4b26-909d-b8913c43e670\") " Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.295891 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/594e1bb5-120e-4b26-909d-b8913c43e670-config-data\") pod \"594e1bb5-120e-4b26-909d-b8913c43e670\" (UID: \"594e1bb5-120e-4b26-909d-b8913c43e670\") " Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.295952 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/594e1bb5-120e-4b26-909d-b8913c43e670-logs\") pod \"594e1bb5-120e-4b26-909d-b8913c43e670\" (UID: \"594e1bb5-120e-4b26-909d-b8913c43e670\") " Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.296630 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/594e1bb5-120e-4b26-909d-b8913c43e670-logs" (OuterVolumeSpecName: "logs") pod "594e1bb5-120e-4b26-909d-b8913c43e670" (UID: "594e1bb5-120e-4b26-909d-b8913c43e670"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.301529 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/594e1bb5-120e-4b26-909d-b8913c43e670-kube-api-access-sdztl" (OuterVolumeSpecName: "kube-api-access-sdztl") pod "594e1bb5-120e-4b26-909d-b8913c43e670" (UID: "594e1bb5-120e-4b26-909d-b8913c43e670"). InnerVolumeSpecName "kube-api-access-sdztl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.304369 4961 generic.go:334] "Generic (PLEG): container finished" podID="594e1bb5-120e-4b26-909d-b8913c43e670" containerID="7fdf1623579aa765dd33353a6378de5af3460e49ef2c8739d944a8c54cf0eca6" exitCode=0 Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.304464 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"594e1bb5-120e-4b26-909d-b8913c43e670","Type":"ContainerDied","Data":"7fdf1623579aa765dd33353a6378de5af3460e49ef2c8739d944a8c54cf0eca6"} Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.304501 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"594e1bb5-120e-4b26-909d-b8913c43e670","Type":"ContainerDied","Data":"823414a084f03dd059d3ecd3171a16486bf2e0abe87da9a07bce3154a197a84f"} Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.304524 4961 scope.go:117] "RemoveContainer" containerID="7fdf1623579aa765dd33353a6378de5af3460e49ef2c8739d944a8c54cf0eca6" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.304700 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.331702 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/594e1bb5-120e-4b26-909d-b8913c43e670-config-data" (OuterVolumeSpecName: "config-data") pod "594e1bb5-120e-4b26-909d-b8913c43e670" (UID: "594e1bb5-120e-4b26-909d-b8913c43e670"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.345756 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/594e1bb5-120e-4b26-909d-b8913c43e670-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "594e1bb5-120e-4b26-909d-b8913c43e670" (UID: "594e1bb5-120e-4b26-909d-b8913c43e670"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.356351 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/594e1bb5-120e-4b26-909d-b8913c43e670-nova-metadata-tls-certs" (OuterVolumeSpecName: "nova-metadata-tls-certs") pod "594e1bb5-120e-4b26-909d-b8913c43e670" (UID: "594e1bb5-120e-4b26-909d-b8913c43e670"). InnerVolumeSpecName "nova-metadata-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.397986 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/594e1bb5-120e-4b26-909d-b8913c43e670-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.398020 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sdztl\" (UniqueName: \"kubernetes.io/projected/594e1bb5-120e-4b26-909d-b8913c43e670-kube-api-access-sdztl\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.398034 4961 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/594e1bb5-120e-4b26-909d-b8913c43e670-nova-metadata-tls-certs\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.398048 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/594e1bb5-120e-4b26-909d-b8913c43e670-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.398062 4961 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/594e1bb5-120e-4b26-909d-b8913c43e670-logs\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.448857 4961 scope.go:117] "RemoveContainer" containerID="2b2930f167730a4952215ebcfd2bb7a763e61f40e6b66ca6890daf37cc598931" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.479383 4961 scope.go:117] "RemoveContainer" containerID="7fdf1623579aa765dd33353a6378de5af3460e49ef2c8739d944a8c54cf0eca6" Dec 05 17:54:19 crc kubenswrapper[4961]: E1205 17:54:19.479985 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7fdf1623579aa765dd33353a6378de5af3460e49ef2c8739d944a8c54cf0eca6\": container with ID starting with 7fdf1623579aa765dd33353a6378de5af3460e49ef2c8739d944a8c54cf0eca6 not found: ID does not exist" containerID="7fdf1623579aa765dd33353a6378de5af3460e49ef2c8739d944a8c54cf0eca6" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.480048 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fdf1623579aa765dd33353a6378de5af3460e49ef2c8739d944a8c54cf0eca6"} err="failed to get container status \"7fdf1623579aa765dd33353a6378de5af3460e49ef2c8739d944a8c54cf0eca6\": rpc error: code = NotFound desc = could not find container \"7fdf1623579aa765dd33353a6378de5af3460e49ef2c8739d944a8c54cf0eca6\": container with ID starting with 7fdf1623579aa765dd33353a6378de5af3460e49ef2c8739d944a8c54cf0eca6 not found: ID does not exist" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.480082 4961 scope.go:117] "RemoveContainer" containerID="2b2930f167730a4952215ebcfd2bb7a763e61f40e6b66ca6890daf37cc598931" Dec 05 17:54:19 crc kubenswrapper[4961]: E1205 17:54:19.480704 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2b2930f167730a4952215ebcfd2bb7a763e61f40e6b66ca6890daf37cc598931\": container with ID starting with 2b2930f167730a4952215ebcfd2bb7a763e61f40e6b66ca6890daf37cc598931 not found: ID does not exist" containerID="2b2930f167730a4952215ebcfd2bb7a763e61f40e6b66ca6890daf37cc598931" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.480747 4961 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2b2930f167730a4952215ebcfd2bb7a763e61f40e6b66ca6890daf37cc598931"} err="failed to get container status \"2b2930f167730a4952215ebcfd2bb7a763e61f40e6b66ca6890daf37cc598931\": rpc error: code = NotFound desc = could not find container \"2b2930f167730a4952215ebcfd2bb7a763e61f40e6b66ca6890daf37cc598931\": container with ID starting with 2b2930f167730a4952215ebcfd2bb7a763e61f40e6b66ca6890daf37cc598931 not found: ID does not exist" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.647296 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.672758 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.686239 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:54:19 crc kubenswrapper[4961]: E1205 17:54:19.686773 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dfc20b2-940f-46c8-bcaa-e9e209f92bed" containerName="init" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.686814 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dfc20b2-940f-46c8-bcaa-e9e209f92bed" containerName="init" Dec 05 17:54:19 crc kubenswrapper[4961]: E1205 17:54:19.686835 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5656c8dd-d36b-48ae-a272-01f789cf280d" containerName="nova-manage" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.686844 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="5656c8dd-d36b-48ae-a272-01f789cf280d" containerName="nova-manage" Dec 05 17:54:19 crc kubenswrapper[4961]: E1205 17:54:19.686859 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="594e1bb5-120e-4b26-909d-b8913c43e670" containerName="nova-metadata-log" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.686866 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="594e1bb5-120e-4b26-909d-b8913c43e670" containerName="nova-metadata-log" Dec 05 17:54:19 crc kubenswrapper[4961]: E1205 17:54:19.686890 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6dfc20b2-940f-46c8-bcaa-e9e209f92bed" containerName="dnsmasq-dns" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.686899 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="6dfc20b2-940f-46c8-bcaa-e9e209f92bed" containerName="dnsmasq-dns" Dec 05 17:54:19 crc kubenswrapper[4961]: E1205 17:54:19.686925 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="594e1bb5-120e-4b26-909d-b8913c43e670" containerName="nova-metadata-metadata" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.686934 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="594e1bb5-120e-4b26-909d-b8913c43e670" containerName="nova-metadata-metadata" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.687202 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="594e1bb5-120e-4b26-909d-b8913c43e670" containerName="nova-metadata-log" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.687235 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="5656c8dd-d36b-48ae-a272-01f789cf280d" containerName="nova-manage" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.687258 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="6dfc20b2-940f-46c8-bcaa-e9e209f92bed" containerName="dnsmasq-dns" Dec 05 17:54:19 crc kubenswrapper[4961]: 
I1205 17:54:19.687267 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="594e1bb5-120e-4b26-909d-b8913c43e670" containerName="nova-metadata-metadata" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.688829 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.692718 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-metadata-internal-svc" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.692950 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-config-data" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.697590 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.804370 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/56cc0cd5-a044-49c6-946c-82e56b2c4d57-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"56cc0cd5-a044-49c6-946c-82e56b2c4d57\") " pod="openstack/nova-metadata-0" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.804447 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56cc0cd5-a044-49c6-946c-82e56b2c4d57-config-data\") pod \"nova-metadata-0\" (UID: \"56cc0cd5-a044-49c6-946c-82e56b2c4d57\") " pod="openstack/nova-metadata-0" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.804513 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mklss\" (UniqueName: \"kubernetes.io/projected/56cc0cd5-a044-49c6-946c-82e56b2c4d57-kube-api-access-mklss\") pod \"nova-metadata-0\" (UID: \"56cc0cd5-a044-49c6-946c-82e56b2c4d57\") " pod="openstack/nova-metadata-0" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.804573 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56cc0cd5-a044-49c6-946c-82e56b2c4d57-logs\") pod \"nova-metadata-0\" (UID: \"56cc0cd5-a044-49c6-946c-82e56b2c4d57\") " pod="openstack/nova-metadata-0" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.804670 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56cc0cd5-a044-49c6-946c-82e56b2c4d57-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"56cc0cd5-a044-49c6-946c-82e56b2c4d57\") " pod="openstack/nova-metadata-0" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.906585 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56cc0cd5-a044-49c6-946c-82e56b2c4d57-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"56cc0cd5-a044-49c6-946c-82e56b2c4d57\") " pod="openstack/nova-metadata-0" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.906693 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/56cc0cd5-a044-49c6-946c-82e56b2c4d57-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"56cc0cd5-a044-49c6-946c-82e56b2c4d57\") " pod="openstack/nova-metadata-0" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 
17:54:19.906740 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56cc0cd5-a044-49c6-946c-82e56b2c4d57-config-data\") pod \"nova-metadata-0\" (UID: \"56cc0cd5-a044-49c6-946c-82e56b2c4d57\") " pod="openstack/nova-metadata-0" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.906846 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mklss\" (UniqueName: \"kubernetes.io/projected/56cc0cd5-a044-49c6-946c-82e56b2c4d57-kube-api-access-mklss\") pod \"nova-metadata-0\" (UID: \"56cc0cd5-a044-49c6-946c-82e56b2c4d57\") " pod="openstack/nova-metadata-0" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.907121 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56cc0cd5-a044-49c6-946c-82e56b2c4d57-logs\") pod \"nova-metadata-0\" (UID: \"56cc0cd5-a044-49c6-946c-82e56b2c4d57\") " pod="openstack/nova-metadata-0" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.908076 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/56cc0cd5-a044-49c6-946c-82e56b2c4d57-logs\") pod \"nova-metadata-0\" (UID: \"56cc0cd5-a044-49c6-946c-82e56b2c4d57\") " pod="openstack/nova-metadata-0" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.912173 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-tls-certs\" (UniqueName: \"kubernetes.io/secret/56cc0cd5-a044-49c6-946c-82e56b2c4d57-nova-metadata-tls-certs\") pod \"nova-metadata-0\" (UID: \"56cc0cd5-a044-49c6-946c-82e56b2c4d57\") " pod="openstack/nova-metadata-0" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.918693 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/56cc0cd5-a044-49c6-946c-82e56b2c4d57-config-data\") pod \"nova-metadata-0\" (UID: \"56cc0cd5-a044-49c6-946c-82e56b2c4d57\") " pod="openstack/nova-metadata-0" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.921388 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/56cc0cd5-a044-49c6-946c-82e56b2c4d57-combined-ca-bundle\") pod \"nova-metadata-0\" (UID: \"56cc0cd5-a044-49c6-946c-82e56b2c4d57\") " pod="openstack/nova-metadata-0" Dec 05 17:54:19 crc kubenswrapper[4961]: I1205 17:54:19.927475 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mklss\" (UniqueName: \"kubernetes.io/projected/56cc0cd5-a044-49c6-946c-82e56b2c4d57-kube-api-access-mklss\") pod \"nova-metadata-0\" (UID: \"56cc0cd5-a044-49c6-946c-82e56b2c4d57\") " pod="openstack/nova-metadata-0" Dec 05 17:54:20 crc kubenswrapper[4961]: I1205 17:54:20.010964 4961 util.go:30] "No sandbox for pod can be found. 
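
The block above is the volume manager's reconcile pass for the recreated nova-metadata-0: each volume first appears under VerifyControllerAttachedVolume (desired state), then "MountVolume started", then "MountVolume.SetUp succeeded" (actual state), while the earlier UnmountVolume/TearDown entries are the same loop running in the opposite direction for pods being torn down. A toy desired-vs-actual reconcile in that spirit; all names are illustrative, not kubelet's types:

// reconcile_toy.go - toy version of the desired-state/actual-state volume
// reconcile visible above. All names are illustrative.
package main

import "fmt"

func reconcile(desired, actual map[string]bool) {
	// Mount everything desired but not yet mounted
	// ("MountVolume started" ... "MountVolume.SetUp succeeded").
	for vol := range desired {
		if !actual[vol] {
			fmt.Println("MountVolume started for", vol)
			actual[vol] = true
			fmt.Println("MountVolume.SetUp succeeded for", vol)
		}
	}
	// Unmount everything mounted but no longer desired
	// ("UnmountVolume started" ... "UnmountVolume.TearDown succeeded").
	for vol := range actual {
		if !desired[vol] {
			fmt.Println("UnmountVolume started for", vol)
			delete(actual, vol)
			fmt.Println("UnmountVolume.TearDown succeeded for", vol)
		}
	}
}

func main() {
	desired := map[string]bool{
		"logs": true, "config-data": true, "combined-ca-bundle": true,
		"nova-metadata-tls-certs": true, "kube-api-access-mklss": true,
	}
	actual := map[string]bool{} // fresh pod: nothing mounted yet
	reconcile(desired, actual)
}
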
Need to start a new one" pod="openstack/nova-metadata-0" Dec 05 17:54:20 crc kubenswrapper[4961]: I1205 17:54:20.319333 4961 generic.go:334] "Generic (PLEG): container finished" podID="c53690af-342f-487b-81f6-c9d9d03ba6c0" containerID="1613e0fb2a4af24d550997ec52461e44f2aa6ace1c5a2355e2b5a79ba5229c9e" exitCode=0 Dec 05 17:54:20 crc kubenswrapper[4961]: I1205 17:54:20.319594 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c53690af-342f-487b-81f6-c9d9d03ba6c0","Type":"ContainerDied","Data":"1613e0fb2a4af24d550997ec52461e44f2aa6ace1c5a2355e2b5a79ba5229c9e"} Dec 05 17:54:20 crc kubenswrapper[4961]: I1205 17:54:20.431828 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-metadata-0"] Dec 05 17:54:20 crc kubenswrapper[4961]: I1205 17:54:20.527857 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 17:54:20 crc kubenswrapper[4961]: I1205 17:54:20.719585 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c53690af-342f-487b-81f6-c9d9d03ba6c0-combined-ca-bundle\") pod \"c53690af-342f-487b-81f6-c9d9d03ba6c0\" (UID: \"c53690af-342f-487b-81f6-c9d9d03ba6c0\") " Dec 05 17:54:20 crc kubenswrapper[4961]: I1205 17:54:20.720135 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d75hs\" (UniqueName: \"kubernetes.io/projected/c53690af-342f-487b-81f6-c9d9d03ba6c0-kube-api-access-d75hs\") pod \"c53690af-342f-487b-81f6-c9d9d03ba6c0\" (UID: \"c53690af-342f-487b-81f6-c9d9d03ba6c0\") " Dec 05 17:54:20 crc kubenswrapper[4961]: I1205 17:54:20.720170 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c53690af-342f-487b-81f6-c9d9d03ba6c0-config-data\") pod \"c53690af-342f-487b-81f6-c9d9d03ba6c0\" (UID: \"c53690af-342f-487b-81f6-c9d9d03ba6c0\") " Dec 05 17:54:20 crc kubenswrapper[4961]: I1205 17:54:20.726217 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c53690af-342f-487b-81f6-c9d9d03ba6c0-kube-api-access-d75hs" (OuterVolumeSpecName: "kube-api-access-d75hs") pod "c53690af-342f-487b-81f6-c9d9d03ba6c0" (UID: "c53690af-342f-487b-81f6-c9d9d03ba6c0"). InnerVolumeSpecName "kube-api-access-d75hs". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:54:20 crc kubenswrapper[4961]: I1205 17:54:20.755640 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c53690af-342f-487b-81f6-c9d9d03ba6c0-config-data" (OuterVolumeSpecName: "config-data") pod "c53690af-342f-487b-81f6-c9d9d03ba6c0" (UID: "c53690af-342f-487b-81f6-c9d9d03ba6c0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:54:20 crc kubenswrapper[4961]: I1205 17:54:20.771243 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c53690af-342f-487b-81f6-c9d9d03ba6c0-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c53690af-342f-487b-81f6-c9d9d03ba6c0" (UID: "c53690af-342f-487b-81f6-c9d9d03ba6c0"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:54:20 crc kubenswrapper[4961]: I1205 17:54:20.826944 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d75hs\" (UniqueName: \"kubernetes.io/projected/c53690af-342f-487b-81f6-c9d9d03ba6c0-kube-api-access-d75hs\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:20 crc kubenswrapper[4961]: I1205 17:54:20.827165 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c53690af-342f-487b-81f6-c9d9d03ba6c0-config-data\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:20 crc kubenswrapper[4961]: I1205 17:54:20.827251 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c53690af-342f-487b-81f6-c9d9d03ba6c0-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:20 crc kubenswrapper[4961]: I1205 17:54:20.874630 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="594e1bb5-120e-4b26-909d-b8913c43e670" path="/var/lib/kubelet/pods/594e1bb5-120e-4b26-909d-b8913c43e670/volumes" Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.230255 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.329712 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"c53690af-342f-487b-81f6-c9d9d03ba6c0","Type":"ContainerDied","Data":"937447da8e8ac2e3b3d723212ea0537e95da230b48ed5275686f21afdb506e85"} Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.329770 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.330082 4961 scope.go:117] "RemoveContainer" containerID="1613e0fb2a4af24d550997ec52461e44f2aa6ace1c5a2355e2b5a79ba5229c9e" Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.331811 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"56cc0cd5-a044-49c6-946c-82e56b2c4d57","Type":"ContainerStarted","Data":"5941ce6f4277e0e93239ec0253a88ecd50ae7eacce7b969e4e628a076d302598"} Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.331875 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"56cc0cd5-a044-49c6-946c-82e56b2c4d57","Type":"ContainerStarted","Data":"f8694f8a2be42743491899d21faee59c8a44fbb0f40616e0a392cf2ea5360598"} Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.331889 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-metadata-0" event={"ID":"56cc0cd5-a044-49c6-946c-82e56b2c4d57","Type":"ContainerStarted","Data":"2c63f245d54d0ccb6f4031b55e5dc2c439dd81a3901140a3a878f303262588e6"} Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.334897 4961 generic.go:334] "Generic (PLEG): container finished" podID="ed08e008-fa51-488f-86dd-d9b81d1572f5" containerID="7fe66960f46993a8209137c33fda5c13cef5f72a3055bb08cdffcea76b317a78" exitCode=0 Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.334934 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"ed08e008-fa51-488f-86dd-d9b81d1572f5","Type":"ContainerDied","Data":"7fe66960f46993a8209137c33fda5c13cef5f72a3055bb08cdffcea76b317a78"} Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.334977 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" 
event={"ID":"ed08e008-fa51-488f-86dd-d9b81d1572f5","Type":"ContainerDied","Data":"7c1307b74782ff5f41eb0a024856067c7c4c1bf1357e751ba0bb892e83adc16a"} Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.335036 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0" Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.338893 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed08e008-fa51-488f-86dd-d9b81d1572f5-logs\") pod \"ed08e008-fa51-488f-86dd-d9b81d1572f5\" (UID: \"ed08e008-fa51-488f-86dd-d9b81d1572f5\") " Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.338952 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed08e008-fa51-488f-86dd-d9b81d1572f5-internal-tls-certs\") pod \"ed08e008-fa51-488f-86dd-d9b81d1572f5\" (UID: \"ed08e008-fa51-488f-86dd-d9b81d1572f5\") " Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.338988 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sth2h\" (UniqueName: \"kubernetes.io/projected/ed08e008-fa51-488f-86dd-d9b81d1572f5-kube-api-access-sth2h\") pod \"ed08e008-fa51-488f-86dd-d9b81d1572f5\" (UID: \"ed08e008-fa51-488f-86dd-d9b81d1572f5\") " Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.339117 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed08e008-fa51-488f-86dd-d9b81d1572f5-config-data\") pod \"ed08e008-fa51-488f-86dd-d9b81d1572f5\" (UID: \"ed08e008-fa51-488f-86dd-d9b81d1572f5\") " Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.339417 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ed08e008-fa51-488f-86dd-d9b81d1572f5-logs" (OuterVolumeSpecName: "logs") pod "ed08e008-fa51-488f-86dd-d9b81d1572f5" (UID: "ed08e008-fa51-488f-86dd-d9b81d1572f5"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.339442 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed08e008-fa51-488f-86dd-d9b81d1572f5-public-tls-certs\") pod \"ed08e008-fa51-488f-86dd-d9b81d1572f5\" (UID: \"ed08e008-fa51-488f-86dd-d9b81d1572f5\") " Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.339530 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed08e008-fa51-488f-86dd-d9b81d1572f5-combined-ca-bundle\") pod \"ed08e008-fa51-488f-86dd-d9b81d1572f5\" (UID: \"ed08e008-fa51-488f-86dd-d9b81d1572f5\") " Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.339964 4961 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/ed08e008-fa51-488f-86dd-d9b81d1572f5-logs\") on node \"crc\" DevicePath \"\"" Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.361650 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.363599 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ed08e008-fa51-488f-86dd-d9b81d1572f5-kube-api-access-sth2h" (OuterVolumeSpecName: "kube-api-access-sth2h") pod "ed08e008-fa51-488f-86dd-d9b81d1572f5" (UID: "ed08e008-fa51-488f-86dd-d9b81d1572f5"). InnerVolumeSpecName "kube-api-access-sth2h". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.374175 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.379273 4961 scope.go:117] "RemoveContainer" containerID="7fe66960f46993a8209137c33fda5c13cef5f72a3055bb08cdffcea76b317a78" Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.390549 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-scheduler-0"] Dec 05 17:54:21 crc kubenswrapper[4961]: E1205 17:54:21.391042 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed08e008-fa51-488f-86dd-d9b81d1572f5" containerName="nova-api-log" Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.391061 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed08e008-fa51-488f-86dd-d9b81d1572f5" containerName="nova-api-log" Dec 05 17:54:21 crc kubenswrapper[4961]: E1205 17:54:21.391082 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ed08e008-fa51-488f-86dd-d9b81d1572f5" containerName="nova-api-api" Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.391090 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="ed08e008-fa51-488f-86dd-d9b81d1572f5" containerName="nova-api-api" Dec 05 17:54:21 crc kubenswrapper[4961]: E1205 17:54:21.391120 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c53690af-342f-487b-81f6-c9d9d03ba6c0" containerName="nova-scheduler-scheduler" Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.391127 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="c53690af-342f-487b-81f6-c9d9d03ba6c0" containerName="nova-scheduler-scheduler" Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.391335 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="c53690af-342f-487b-81f6-c9d9d03ba6c0" containerName="nova-scheduler-scheduler" Dec 05 17:54:21 crc 
kubenswrapper[4961]: I1205 17:54:21.391357 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed08e008-fa51-488f-86dd-d9b81d1572f5" containerName="nova-api-log" Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.391366 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="ed08e008-fa51-488f-86dd-d9b81d1572f5" containerName="nova-api-api" Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.394759 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0" Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.408325 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-scheduler-config-data" Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.412402 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed08e008-fa51-488f-86dd-d9b81d1572f5-config-data" (OuterVolumeSpecName: "config-data") pod "ed08e008-fa51-488f-86dd-d9b81d1572f5" (UID: "ed08e008-fa51-488f-86dd-d9b81d1572f5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.413427 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed08e008-fa51-488f-86dd-d9b81d1572f5-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "ed08e008-fa51-488f-86dd-d9b81d1572f5" (UID: "ed08e008-fa51-488f-86dd-d9b81d1572f5"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.414425 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed08e008-fa51-488f-86dd-d9b81d1572f5-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "ed08e008-fa51-488f-86dd-d9b81d1572f5" (UID: "ed08e008-fa51-488f-86dd-d9b81d1572f5"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.416359 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ed08e008-fa51-488f-86dd-d9b81d1572f5-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ed08e008-fa51-488f-86dd-d9b81d1572f5" (UID: "ed08e008-fa51-488f-86dd-d9b81d1572f5"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.419592 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.419925 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-metadata-0" podStartSLOduration=2.419864198 podStartE2EDuration="2.419864198s" podCreationTimestamp="2025-12-05 17:54:19 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:54:21.378607663 +0000 UTC m=+1267.439758146" watchObservedRunningTime="2025-12-05 17:54:21.419864198 +0000 UTC m=+1267.481014671"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.442016 4961 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed08e008-fa51-488f-86dd-d9b81d1572f5-internal-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.442058 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sth2h\" (UniqueName: \"kubernetes.io/projected/ed08e008-fa51-488f-86dd-d9b81d1572f5-kube-api-access-sth2h\") on node \"crc\" DevicePath \"\""
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.442071 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ed08e008-fa51-488f-86dd-d9b81d1572f5-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.442080 4961 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/ed08e008-fa51-488f-86dd-d9b81d1572f5-public-tls-certs\") on node \"crc\" DevicePath \"\""
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.442090 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ed08e008-fa51-488f-86dd-d9b81d1572f5-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.446990 4961 scope.go:117] "RemoveContainer" containerID="943af21f4f794f9f3bfe775c37f219fa600ff5e6fddd43aa966ae3e8ba58d987"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.467194 4961 scope.go:117] "RemoveContainer" containerID="7fe66960f46993a8209137c33fda5c13cef5f72a3055bb08cdffcea76b317a78"
Dec 05 17:54:21 crc kubenswrapper[4961]: E1205 17:54:21.467648 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7fe66960f46993a8209137c33fda5c13cef5f72a3055bb08cdffcea76b317a78\": container with ID starting with 7fe66960f46993a8209137c33fda5c13cef5f72a3055bb08cdffcea76b317a78 not found: ID does not exist" containerID="7fe66960f46993a8209137c33fda5c13cef5f72a3055bb08cdffcea76b317a78"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.467706 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7fe66960f46993a8209137c33fda5c13cef5f72a3055bb08cdffcea76b317a78"} err="failed to get container status \"7fe66960f46993a8209137c33fda5c13cef5f72a3055bb08cdffcea76b317a78\": rpc error: code = NotFound desc = could not find container \"7fe66960f46993a8209137c33fda5c13cef5f72a3055bb08cdffcea76b317a78\": container with ID starting with 7fe66960f46993a8209137c33fda5c13cef5f72a3055bb08cdffcea76b317a78 not found: ID does not exist"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.467741 4961 scope.go:117] "RemoveContainer" containerID="943af21f4f794f9f3bfe775c37f219fa600ff5e6fddd43aa966ae3e8ba58d987"
Dec 05 17:54:21 crc kubenswrapper[4961]: E1205 17:54:21.468173 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"943af21f4f794f9f3bfe775c37f219fa600ff5e6fddd43aa966ae3e8ba58d987\": container with ID starting with 943af21f4f794f9f3bfe775c37f219fa600ff5e6fddd43aa966ae3e8ba58d987 not found: ID does not exist" containerID="943af21f4f794f9f3bfe775c37f219fa600ff5e6fddd43aa966ae3e8ba58d987"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.468212 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"943af21f4f794f9f3bfe775c37f219fa600ff5e6fddd43aa966ae3e8ba58d987"} err="failed to get container status \"943af21f4f794f9f3bfe775c37f219fa600ff5e6fddd43aa966ae3e8ba58d987\": rpc error: code = NotFound desc = could not find container \"943af21f4f794f9f3bfe775c37f219fa600ff5e6fddd43aa966ae3e8ba58d987\": container with ID starting with 943af21f4f794f9f3bfe775c37f219fa600ff5e6fddd43aa966ae3e8ba58d987 not found: ID does not exist"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.543635 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/839ae7fd-5c5c-4767-b0d8-c7f24f17b03b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"839ae7fd-5c5c-4767-b0d8-c7f24f17b03b\") " pod="openstack/nova-scheduler-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.543687 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/839ae7fd-5c5c-4767-b0d8-c7f24f17b03b-config-data\") pod \"nova-scheduler-0\" (UID: \"839ae7fd-5c5c-4767-b0d8-c7f24f17b03b\") " pod="openstack/nova-scheduler-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.543709 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxlcc\" (UniqueName: \"kubernetes.io/projected/839ae7fd-5c5c-4767-b0d8-c7f24f17b03b-kube-api-access-fxlcc\") pod \"nova-scheduler-0\" (UID: \"839ae7fd-5c5c-4767-b0d8-c7f24f17b03b\") " pod="openstack/nova-scheduler-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.645271 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/839ae7fd-5c5c-4767-b0d8-c7f24f17b03b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"839ae7fd-5c5c-4767-b0d8-c7f24f17b03b\") " pod="openstack/nova-scheduler-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.645358 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/839ae7fd-5c5c-4767-b0d8-c7f24f17b03b-config-data\") pod \"nova-scheduler-0\" (UID: \"839ae7fd-5c5c-4767-b0d8-c7f24f17b03b\") " pod="openstack/nova-scheduler-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.645388 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxlcc\" (UniqueName: \"kubernetes.io/projected/839ae7fd-5c5c-4767-b0d8-c7f24f17b03b-kube-api-access-fxlcc\") pod \"nova-scheduler-0\" (UID: \"839ae7fd-5c5c-4767-b0d8-c7f24f17b03b\") " pod="openstack/nova-scheduler-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.648730 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/839ae7fd-5c5c-4767-b0d8-c7f24f17b03b-config-data\") pod \"nova-scheduler-0\" (UID: \"839ae7fd-5c5c-4767-b0d8-c7f24f17b03b\") " pod="openstack/nova-scheduler-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.649253 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/839ae7fd-5c5c-4767-b0d8-c7f24f17b03b-combined-ca-bundle\") pod \"nova-scheduler-0\" (UID: \"839ae7fd-5c5c-4767-b0d8-c7f24f17b03b\") " pod="openstack/nova-scheduler-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.667262 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxlcc\" (UniqueName: \"kubernetes.io/projected/839ae7fd-5c5c-4767-b0d8-c7f24f17b03b-kube-api-access-fxlcc\") pod \"nova-scheduler-0\" (UID: \"839ae7fd-5c5c-4767-b0d8-c7f24f17b03b\") " pod="openstack/nova-scheduler-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.667288 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-0"]
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.681939 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-0"]
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.693534 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-api-0"]
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.695198 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.699461 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-internal-svc"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.699466 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-api-config-data"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.699579 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-nova-public-svc"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.704191 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.747369 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-scheduler-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.848816 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/258d7583-7072-4621-8490-c0bfcc91abff-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"258d7583-7072-4621-8490-c0bfcc91abff\") " pod="openstack/nova-api-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.849384 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/258d7583-7072-4621-8490-c0bfcc91abff-public-tls-certs\") pod \"nova-api-0\" (UID: \"258d7583-7072-4621-8490-c0bfcc91abff\") " pod="openstack/nova-api-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.849468 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/258d7583-7072-4621-8490-c0bfcc91abff-config-data\") pod \"nova-api-0\" (UID: \"258d7583-7072-4621-8490-c0bfcc91abff\") " pod="openstack/nova-api-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.849541 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtcvd\" (UniqueName: \"kubernetes.io/projected/258d7583-7072-4621-8490-c0bfcc91abff-kube-api-access-mtcvd\") pod \"nova-api-0\" (UID: \"258d7583-7072-4621-8490-c0bfcc91abff\") " pod="openstack/nova-api-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.849570 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/258d7583-7072-4621-8490-c0bfcc91abff-logs\") pod \"nova-api-0\" (UID: \"258d7583-7072-4621-8490-c0bfcc91abff\") " pod="openstack/nova-api-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.849638 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/258d7583-7072-4621-8490-c0bfcc91abff-internal-tls-certs\") pod \"nova-api-0\" (UID: \"258d7583-7072-4621-8490-c0bfcc91abff\") " pod="openstack/nova-api-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.951813 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/258d7583-7072-4621-8490-c0bfcc91abff-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"258d7583-7072-4621-8490-c0bfcc91abff\") " pod="openstack/nova-api-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.951870 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/258d7583-7072-4621-8490-c0bfcc91abff-public-tls-certs\") pod \"nova-api-0\" (UID: \"258d7583-7072-4621-8490-c0bfcc91abff\") " pod="openstack/nova-api-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.951940 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/258d7583-7072-4621-8490-c0bfcc91abff-config-data\") pod \"nova-api-0\" (UID: \"258d7583-7072-4621-8490-c0bfcc91abff\") " pod="openstack/nova-api-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.951991 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtcvd\" (UniqueName: \"kubernetes.io/projected/258d7583-7072-4621-8490-c0bfcc91abff-kube-api-access-mtcvd\") pod \"nova-api-0\" (UID: \"258d7583-7072-4621-8490-c0bfcc91abff\") " pod="openstack/nova-api-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.952010 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/258d7583-7072-4621-8490-c0bfcc91abff-logs\") pod \"nova-api-0\" (UID: \"258d7583-7072-4621-8490-c0bfcc91abff\") " pod="openstack/nova-api-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.952062 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/258d7583-7072-4621-8490-c0bfcc91abff-internal-tls-certs\") pod \"nova-api-0\" (UID: \"258d7583-7072-4621-8490-c0bfcc91abff\") " pod="openstack/nova-api-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.954440 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/258d7583-7072-4621-8490-c0bfcc91abff-logs\") pod \"nova-api-0\" (UID: \"258d7583-7072-4621-8490-c0bfcc91abff\") " pod="openstack/nova-api-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.955606 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/258d7583-7072-4621-8490-c0bfcc91abff-combined-ca-bundle\") pod \"nova-api-0\" (UID: \"258d7583-7072-4621-8490-c0bfcc91abff\") " pod="openstack/nova-api-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.956037 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/258d7583-7072-4621-8490-c0bfcc91abff-config-data\") pod \"nova-api-0\" (UID: \"258d7583-7072-4621-8490-c0bfcc91abff\") " pod="openstack/nova-api-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.956281 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/258d7583-7072-4621-8490-c0bfcc91abff-public-tls-certs\") pod \"nova-api-0\" (UID: \"258d7583-7072-4621-8490-c0bfcc91abff\") " pod="openstack/nova-api-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.957080 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/258d7583-7072-4621-8490-c0bfcc91abff-internal-tls-certs\") pod \"nova-api-0\" (UID: \"258d7583-7072-4621-8490-c0bfcc91abff\") " pod="openstack/nova-api-0"
Dec 05 17:54:21 crc kubenswrapper[4961]: I1205 17:54:21.968995 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtcvd\" (UniqueName: \"kubernetes.io/projected/258d7583-7072-4621-8490-c0bfcc91abff-kube-api-access-mtcvd\") pod \"nova-api-0\" (UID: \"258d7583-7072-4621-8490-c0bfcc91abff\") " pod="openstack/nova-api-0"
Dec 05 17:54:22 crc kubenswrapper[4961]: I1205 17:54:22.019099 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-api-0"
Dec 05 17:54:22 crc kubenswrapper[4961]: I1205 17:54:22.178964 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-scheduler-0"]
Dec 05 17:54:22 crc kubenswrapper[4961]: W1205 17:54:22.190984 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod839ae7fd_5c5c_4767_b0d8_c7f24f17b03b.slice/crio-13e96a263b7e9b67d449c83e9372f0e7c45d5b81d29bb49e2bb440416e73bc7d WatchSource:0}: Error finding container 13e96a263b7e9b67d449c83e9372f0e7c45d5b81d29bb49e2bb440416e73bc7d: Status 404 returned error can't find the container with id 13e96a263b7e9b67d449c83e9372f0e7c45d5b81d29bb49e2bb440416e73bc7d
Dec 05 17:54:22 crc kubenswrapper[4961]: I1205 17:54:22.345654 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"839ae7fd-5c5c-4767-b0d8-c7f24f17b03b","Type":"ContainerStarted","Data":"13e96a263b7e9b67d449c83e9372f0e7c45d5b81d29bb49e2bb440416e73bc7d"}
Dec 05 17:54:22 crc kubenswrapper[4961]: I1205 17:54:22.498724 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-api-0"]
Dec 05 17:54:22 crc kubenswrapper[4961]: W1205 17:54:22.501261 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod258d7583_7072_4621_8490_c0bfcc91abff.slice/crio-4f809dcfbc2606aa9ad6167fa65086da4bca467f61ea8604e46dba3d6e3d932f WatchSource:0}: Error finding container 4f809dcfbc2606aa9ad6167fa65086da4bca467f61ea8604e46dba3d6e3d932f: Status 404 returned error can't find the container with id 4f809dcfbc2606aa9ad6167fa65086da4bca467f61ea8604e46dba3d6e3d932f
Dec 05 17:54:22 crc kubenswrapper[4961]: I1205 17:54:22.875701 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c53690af-342f-487b-81f6-c9d9d03ba6c0" path="/var/lib/kubelet/pods/c53690af-342f-487b-81f6-c9d9d03ba6c0/volumes"
Dec 05 17:54:22 crc kubenswrapper[4961]: I1205 17:54:22.876719 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ed08e008-fa51-488f-86dd-d9b81d1572f5" path="/var/lib/kubelet/pods/ed08e008-fa51-488f-86dd-d9b81d1572f5/volumes"
Dec 05 17:54:23 crc kubenswrapper[4961]: I1205 17:54:23.358302 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-scheduler-0" event={"ID":"839ae7fd-5c5c-4767-b0d8-c7f24f17b03b","Type":"ContainerStarted","Data":"cc95b615398a0f94ac391d435b4ec1dfe99c74520a5e2d1e49c9782764fe6606"}
Dec 05 17:54:23 crc kubenswrapper[4961]: I1205 17:54:23.362424 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"258d7583-7072-4621-8490-c0bfcc91abff","Type":"ContainerStarted","Data":"a47a97405f1735b99272eaac127e18881369b491cee2eecdb18cbcd9fbfdc60e"}
Dec 05 17:54:23 crc kubenswrapper[4961]: I1205 17:54:23.362464 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"258d7583-7072-4621-8490-c0bfcc91abff","Type":"ContainerStarted","Data":"cb5f7c13bf50e3be1ef56c1658684204bea1c894cd7a16027ab7c7e9df026647"}
Dec 05 17:54:23 crc kubenswrapper[4961]: I1205 17:54:23.362476 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-api-0" event={"ID":"258d7583-7072-4621-8490-c0bfcc91abff","Type":"ContainerStarted","Data":"4f809dcfbc2606aa9ad6167fa65086da4bca467f61ea8604e46dba3d6e3d932f"}
Dec 05 17:54:23 crc kubenswrapper[4961]: I1205 17:54:23.378335 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-scheduler-0" podStartSLOduration=2.378318428 podStartE2EDuration="2.378318428s" podCreationTimestamp="2025-12-05 17:54:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:54:23.373522919 +0000 UTC m=+1269.434673412" watchObservedRunningTime="2025-12-05 17:54:23.378318428 +0000 UTC m=+1269.439468891"
Dec 05 17:54:23 crc kubenswrapper[4961]: I1205 17:54:23.411515 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-api-0" podStartSLOduration=2.411489321 podStartE2EDuration="2.411489321s" podCreationTimestamp="2025-12-05 17:54:21 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:54:23.40092381 +0000 UTC m=+1269.462074313" watchObservedRunningTime="2025-12-05 17:54:23.411489321 +0000 UTC m=+1269.472639804"
Dec 05 17:54:24 crc kubenswrapper[4961]: E1205 17:54:24.349369 4961 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5656c8dd_d36b_48ae_a272_01f789cf280d.slice/crio-4a322e66ebde2cdac5c97aaf625096b61d0f839a02da4a4245a53807e87554f6\": RecentStats: unable to find data in memory cache]"
Dec 05 17:54:25 crc kubenswrapper[4961]: I1205 17:54:25.012294 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 05 17:54:25 crc kubenswrapper[4961]: I1205 17:54:25.012352 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-metadata-0"
Dec 05 17:54:26 crc kubenswrapper[4961]: I1205 17:54:26.748021 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-scheduler-0"
Dec 05 17:54:30 crc kubenswrapper[4961]: I1205 17:54:30.012205 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Dec 05 17:54:30 crc kubenswrapper[4961]: I1205 17:54:30.012723 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-metadata-0"
Dec 05 17:54:31 crc kubenswrapper[4961]: I1205 17:54:31.027057 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="56cc0cd5-a044-49c6-946c-82e56b2c4d57" containerName="nova-metadata-log" probeResult="failure" output="Get \"https://10.217.0.202:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 05 17:54:31 crc kubenswrapper[4961]: I1205 17:54:31.027052 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-metadata-0" podUID="56cc0cd5-a044-49c6-946c-82e56b2c4d57" containerName="nova-metadata-metadata" probeResult="failure" output="Get \"https://10.217.0.202:8775/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 05 17:54:31 crc kubenswrapper[4961]: I1205 17:54:31.748025 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-scheduler-0"
Dec 05 17:54:31 crc kubenswrapper[4961]: I1205 17:54:31.782234 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-scheduler-0"
Dec 05 17:54:32 crc kubenswrapper[4961]: I1205 17:54:32.019703 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 05 17:54:32 crc kubenswrapper[4961]: I1205 17:54:32.019793 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack/nova-api-0"
Dec 05 17:54:32 crc kubenswrapper[4961]: I1205 17:54:32.469616 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-scheduler-0"
Dec 05 17:54:33 crc kubenswrapper[4961]: I1205 17:54:33.035097 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="258d7583-7072-4621-8490-c0bfcc91abff" containerName="nova-api-log" probeResult="failure" output="Get \"https://10.217.0.204:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 05 17:54:33 crc kubenswrapper[4961]: I1205 17:54:33.035136 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openstack/nova-api-0" podUID="258d7583-7072-4621-8490-c0bfcc91abff" containerName="nova-api-api" probeResult="failure" output="Get \"https://10.217.0.204:8774/\": net/http: request canceled (Client.Timeout exceeded while awaiting headers)"
Dec 05 17:54:34 crc kubenswrapper[4961]: I1205 17:54:34.471743 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/ceilometer-0"
Dec 05 17:54:34 crc kubenswrapper[4961]: E1205 17:54:34.621221 4961 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5656c8dd_d36b_48ae_a272_01f789cf280d.slice/crio-4a322e66ebde2cdac5c97aaf625096b61d0f839a02da4a4245a53807e87554f6\": RecentStats: unable to find data in memory cache]"
Dec 05 17:54:40 crc kubenswrapper[4961]: I1205 17:54:40.018145 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Dec 05 17:54:40 crc kubenswrapper[4961]: I1205 17:54:40.024269 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-metadata-0"
Dec 05 17:54:40 crc kubenswrapper[4961]: I1205 17:54:40.026016 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Dec 05 17:54:40 crc kubenswrapper[4961]: I1205 17:54:40.530303 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-metadata-0"
Dec 05 17:54:42 crc kubenswrapper[4961]: I1205 17:54:42.029807 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Dec 05 17:54:42 crc kubenswrapper[4961]: I1205 17:54:42.031760 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Dec 05 17:54:42 crc kubenswrapper[4961]: I1205 17:54:42.039531 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack/nova-api-0"
Dec 05 17:54:42 crc kubenswrapper[4961]: I1205 17:54:42.041195 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Dec 05 17:54:42 crc kubenswrapper[4961]: I1205 17:54:42.538905 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/nova-api-0"
Dec 05 17:54:42 crc kubenswrapper[4961]: I1205 17:54:42.544561 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/nova-api-0"
Dec 05 17:54:44 crc kubenswrapper[4961]: E1205 17:54:44.863460 4961 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5656c8dd_d36b_48ae_a272_01f789cf280d.slice/crio-4a322e66ebde2cdac5c97aaf625096b61d0f839a02da4a4245a53807e87554f6\": RecentStats: unable to find data in memory cache]"
Dec 05 17:54:50 crc kubenswrapper[4961]: I1205 17:54:50.497801 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 05 17:54:51 crc kubenswrapper[4961]: I1205 17:54:51.571768 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 05 17:54:55 crc kubenswrapper[4961]: E1205 17:54:55.106260 4961 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5656c8dd_d36b_48ae_a272_01f789cf280d.slice/crio-4a322e66ebde2cdac5c97aaf625096b61d0f839a02da4a4245a53807e87554f6\": RecentStats: unable to find data in memory cache]"
Dec 05 17:54:55 crc kubenswrapper[4961]: I1205 17:54:55.266399 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-server-0" podUID="346da897-3e71-4d6f-b17d-fe5f905dd705" containerName="rabbitmq" containerID="cri-o://da4f23365e5ecafdc73b5143c8bd174d0469c2a2473dc2c85cccd597fef41bdc" gracePeriod=604796
Dec 05 17:54:56 crc kubenswrapper[4961]: I1205 17:54:56.088114 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/rabbitmq-cell1-server-0" podUID="494ff2b1-6bb3-4c8a-be81-02fe6f884caa" containerName="rabbitmq" containerID="cri-o://308116382cb27e818f070b7b056141397be7f285093c2ad4ff20f0102e6836e5" gracePeriod=604796
Dec 05 17:55:01 crc kubenswrapper[4961]: I1205 17:55:01.237264 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-server-0" podUID="346da897-3e71-4d6f-b17d-fe5f905dd705" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.99:5671: connect: connection refused"
Dec 05 17:55:01 crc kubenswrapper[4961]: I1205 17:55:01.545138 4961 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack/rabbitmq-cell1-server-0" podUID="494ff2b1-6bb3-4c8a-be81-02fe6f884caa" containerName="rabbitmq" probeResult="failure" output="dial tcp 10.217.0.100:5671: connect: connection refused"
Dec 05 17:55:01 crc kubenswrapper[4961]: I1205 17:55:01.716608 4961 generic.go:334] "Generic (PLEG): container finished" podID="346da897-3e71-4d6f-b17d-fe5f905dd705" containerID="da4f23365e5ecafdc73b5143c8bd174d0469c2a2473dc2c85cccd597fef41bdc" exitCode=0
Dec 05 17:55:01 crc kubenswrapper[4961]: I1205 17:55:01.716659 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"346da897-3e71-4d6f-b17d-fe5f905dd705","Type":"ContainerDied","Data":"da4f23365e5ecafdc73b5143c8bd174d0469c2a2473dc2c85cccd597fef41bdc"}
Dec 05 17:55:01 crc kubenswrapper[4961]: I1205 17:55:01.868953 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 05 17:55:01 crc kubenswrapper[4961]: I1205 17:55:01.993074 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/346da897-3e71-4d6f-b17d-fe5f905dd705-config-data\") pod \"346da897-3e71-4d6f-b17d-fe5f905dd705\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") "
Dec 05 17:55:01 crc kubenswrapper[4961]: I1205 17:55:01.993118 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/346da897-3e71-4d6f-b17d-fe5f905dd705-server-conf\") pod \"346da897-3e71-4d6f-b17d-fe5f905dd705\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") "
Dec 05 17:55:01 crc kubenswrapper[4961]: I1205 17:55:01.993138 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/346da897-3e71-4d6f-b17d-fe5f905dd705-rabbitmq-confd\") pod \"346da897-3e71-4d6f-b17d-fe5f905dd705\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") "
Dec 05 17:55:01 crc kubenswrapper[4961]: I1205 17:55:01.993178 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/346da897-3e71-4d6f-b17d-fe5f905dd705-rabbitmq-tls\") pod \"346da897-3e71-4d6f-b17d-fe5f905dd705\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") "
Dec 05 17:55:01 crc kubenswrapper[4961]: I1205 17:55:01.993206 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qbtnq\" (UniqueName: \"kubernetes.io/projected/346da897-3e71-4d6f-b17d-fe5f905dd705-kube-api-access-qbtnq\") pod \"346da897-3e71-4d6f-b17d-fe5f905dd705\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") "
Dec 05 17:55:01 crc kubenswrapper[4961]: I1205 17:55:01.993252 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"346da897-3e71-4d6f-b17d-fe5f905dd705\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") "
Dec 05 17:55:01 crc kubenswrapper[4961]: I1205 17:55:01.993401 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/346da897-3e71-4d6f-b17d-fe5f905dd705-rabbitmq-plugins\") pod \"346da897-3e71-4d6f-b17d-fe5f905dd705\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") "
Dec 05 17:55:01 crc kubenswrapper[4961]: I1205 17:55:01.993417 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/346da897-3e71-4d6f-b17d-fe5f905dd705-erlang-cookie-secret\") pod \"346da897-3e71-4d6f-b17d-fe5f905dd705\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") "
Dec 05 17:55:01 crc kubenswrapper[4961]: I1205 17:55:01.993445 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/346da897-3e71-4d6f-b17d-fe5f905dd705-plugins-conf\") pod \"346da897-3e71-4d6f-b17d-fe5f905dd705\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") "
Dec 05 17:55:01 crc kubenswrapper[4961]: I1205 17:55:01.993498 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/346da897-3e71-4d6f-b17d-fe5f905dd705-pod-info\") pod \"346da897-3e71-4d6f-b17d-fe5f905dd705\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") "
Dec 05 17:55:01 crc kubenswrapper[4961]: I1205 17:55:01.993521 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/346da897-3e71-4d6f-b17d-fe5f905dd705-rabbitmq-erlang-cookie\") pod \"346da897-3e71-4d6f-b17d-fe5f905dd705\" (UID: \"346da897-3e71-4d6f-b17d-fe5f905dd705\") "
Dec 05 17:55:01 crc kubenswrapper[4961]: I1205 17:55:01.995403 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/346da897-3e71-4d6f-b17d-fe5f905dd705-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "346da897-3e71-4d6f-b17d-fe5f905dd705" (UID: "346da897-3e71-4d6f-b17d-fe5f905dd705"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:55:01 crc kubenswrapper[4961]: I1205 17:55:01.996141 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/346da897-3e71-4d6f-b17d-fe5f905dd705-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "346da897-3e71-4d6f-b17d-fe5f905dd705" (UID: "346da897-3e71-4d6f-b17d-fe5f905dd705"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:55:01 crc kubenswrapper[4961]: I1205 17:55:01.996185 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/346da897-3e71-4d6f-b17d-fe5f905dd705-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "346da897-3e71-4d6f-b17d-fe5f905dd705" (UID: "346da897-3e71-4d6f-b17d-fe5f905dd705"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.000761 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "persistence") pod "346da897-3e71-4d6f-b17d-fe5f905dd705" (UID: "346da897-3e71-4d6f-b17d-fe5f905dd705"). InnerVolumeSpecName "local-storage03-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.001478 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/346da897-3e71-4d6f-b17d-fe5f905dd705-pod-info" (OuterVolumeSpecName: "pod-info") pod "346da897-3e71-4d6f-b17d-fe5f905dd705" (UID: "346da897-3e71-4d6f-b17d-fe5f905dd705"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.002825 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/346da897-3e71-4d6f-b17d-fe5f905dd705-kube-api-access-qbtnq" (OuterVolumeSpecName: "kube-api-access-qbtnq") pod "346da897-3e71-4d6f-b17d-fe5f905dd705" (UID: "346da897-3e71-4d6f-b17d-fe5f905dd705"). InnerVolumeSpecName "kube-api-access-qbtnq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.004171 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/346da897-3e71-4d6f-b17d-fe5f905dd705-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "346da897-3e71-4d6f-b17d-fe5f905dd705" (UID: "346da897-3e71-4d6f-b17d-fe5f905dd705"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.006947 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/346da897-3e71-4d6f-b17d-fe5f905dd705-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "346da897-3e71-4d6f-b17d-fe5f905dd705" (UID: "346da897-3e71-4d6f-b17d-fe5f905dd705"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.042087 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/346da897-3e71-4d6f-b17d-fe5f905dd705-config-data" (OuterVolumeSpecName: "config-data") pod "346da897-3e71-4d6f-b17d-fe5f905dd705" (UID: "346da897-3e71-4d6f-b17d-fe5f905dd705"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.052169 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/346da897-3e71-4d6f-b17d-fe5f905dd705-server-conf" (OuterVolumeSpecName: "server-conf") pod "346da897-3e71-4d6f-b17d-fe5f905dd705" (UID: "346da897-3e71-4d6f-b17d-fe5f905dd705"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.095361 4961 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/346da897-3e71-4d6f-b17d-fe5f905dd705-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.095392 4961 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/346da897-3e71-4d6f-b17d-fe5f905dd705-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.095402 4961 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/346da897-3e71-4d6f-b17d-fe5f905dd705-plugins-conf\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.095409 4961 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/346da897-3e71-4d6f-b17d-fe5f905dd705-pod-info\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.095418 4961 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/346da897-3e71-4d6f-b17d-fe5f905dd705-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.095428 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/346da897-3e71-4d6f-b17d-fe5f905dd705-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.095437 4961 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/346da897-3e71-4d6f-b17d-fe5f905dd705-server-conf\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.095445 4961 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/346da897-3e71-4d6f-b17d-fe5f905dd705-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.095456 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qbtnq\" (UniqueName: \"kubernetes.io/projected/346da897-3e71-4d6f-b17d-fe5f905dd705-kube-api-access-qbtnq\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.095477 4961 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" "
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.119885 4961 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc"
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.146970 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/346da897-3e71-4d6f-b17d-fe5f905dd705-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "346da897-3e71-4d6f-b17d-fe5f905dd705" (UID: "346da897-3e71-4d6f-b17d-fe5f905dd705"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.197361 4961 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.197394 4961 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/346da897-3e71-4d6f-b17d-fe5f905dd705-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.667992 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.809618 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-config-data\") pod \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") "
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.811870 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-rabbitmq-plugins\") pod \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") "
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.812097 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5wnfm\" (UniqueName: \"kubernetes.io/projected/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-kube-api-access-5wnfm\") pod \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") "
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.812211 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-server-conf\") pod \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") "
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.812370 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-plugins-conf\") pod \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") "
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.812512 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-pod-info\") pod \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") "
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.812624 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-rabbitmq-erlang-cookie\") pod \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") "
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.812917 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-rabbitmq-confd\") pod \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") "
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.813076 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") "
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.813235 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-erlang-cookie-secret\") pod \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") "
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.813341 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-rabbitmq-tls\") pod \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\" (UID: \"494ff2b1-6bb3-4c8a-be81-02fe6f884caa\") "
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.818187 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "494ff2b1-6bb3-4c8a-be81-02fe6f884caa" (UID: "494ff2b1-6bb3-4c8a-be81-02fe6f884caa"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.820185 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "494ff2b1-6bb3-4c8a-be81-02fe6f884caa" (UID: "494ff2b1-6bb3-4c8a-be81-02fe6f884caa"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.822192 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"346da897-3e71-4d6f-b17d-fe5f905dd705","Type":"ContainerDied","Data":"9aef265e29d7b76dff61bc89ef13712094236b400da739a012b8e1f204bfca11"}
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.822251 4961 scope.go:117] "RemoveContainer" containerID="da4f23365e5ecafdc73b5143c8bd174d0469c2a2473dc2c85cccd597fef41bdc"
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.822456 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.830798 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "494ff2b1-6bb3-4c8a-be81-02fe6f884caa" (UID: "494ff2b1-6bb3-4c8a-be81-02fe6f884caa"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.837052 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "494ff2b1-6bb3-4c8a-be81-02fe6f884caa" (UID: "494ff2b1-6bb3-4c8a-be81-02fe6f884caa"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.839092 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-rabbitmq-tls" (OuterVolumeSpecName: "rabbitmq-tls") pod "494ff2b1-6bb3-4c8a-be81-02fe6f884caa" (UID: "494ff2b1-6bb3-4c8a-be81-02fe6f884caa"). InnerVolumeSpecName "rabbitmq-tls". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.847132 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-kube-api-access-5wnfm" (OuterVolumeSpecName: "kube-api-access-5wnfm") pod "494ff2b1-6bb3-4c8a-be81-02fe6f884caa" (UID: "494ff2b1-6bb3-4c8a-be81-02fe6f884caa"). InnerVolumeSpecName "kube-api-access-5wnfm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.855217 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-pod-info" (OuterVolumeSpecName: "pod-info") pod "494ff2b1-6bb3-4c8a-be81-02fe6f884caa" (UID: "494ff2b1-6bb3-4c8a-be81-02fe6f884caa"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.880189 4961 generic.go:334] "Generic (PLEG): container finished" podID="494ff2b1-6bb3-4c8a-be81-02fe6f884caa" containerID="308116382cb27e818f070b7b056141397be7f285093c2ad4ff20f0102e6836e5" exitCode=0
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.880319 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.887963 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage04-crc" (OuterVolumeSpecName: "persistence") pod "494ff2b1-6bb3-4c8a-be81-02fe6f884caa" (UID: "494ff2b1-6bb3-4c8a-be81-02fe6f884caa"). InnerVolumeSpecName "local-storage04-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.935324 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-config-data" (OuterVolumeSpecName: "config-data") pod "494ff2b1-6bb3-4c8a-be81-02fe6f884caa" (UID: "494ff2b1-6bb3-4c8a-be81-02fe6f884caa"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.939367 4961 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" "
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.951481 4961 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.951508 4961 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-rabbitmq-tls\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.951517 4961 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.951527 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5wnfm\" (UniqueName: \"kubernetes.io/projected/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-kube-api-access-5wnfm\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.951535 4961 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-plugins-conf\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.951544 4961 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-pod-info\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.951552 4961 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.964510 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-server-conf" (OuterVolumeSpecName: "server-conf") pod "494ff2b1-6bb3-4c8a-be81-02fe6f884caa" (UID: "494ff2b1-6bb3-4c8a-be81-02fe6f884caa"). InnerVolumeSpecName "server-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.978277 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"494ff2b1-6bb3-4c8a-be81-02fe6f884caa","Type":"ContainerDied","Data":"308116382cb27e818f070b7b056141397be7f285093c2ad4ff20f0102e6836e5"}
Dec 05 17:55:02 crc kubenswrapper[4961]: I1205 17:55:02.978320 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"494ff2b1-6bb3-4c8a-be81-02fe6f884caa","Type":"ContainerDied","Data":"9e3d6fa198439a0af090b9ae9cf7ef5748981436ec9f32f35251f0b456a126c6"}
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.003474 4961 scope.go:117] "RemoveContainer" containerID="c802bc16e5515abf77c97c7accaacad9ade519a54f952519cddc8c8437ae18cf"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.046589 4961 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage04-crc" (UniqueName: "kubernetes.io/local-volume/local-storage04-crc") on node "crc"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.053224 4961 reconciler_common.go:293] "Volume detached for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.053248 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.053258 4961 reconciler_common.go:293] "Volume detached for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-server-conf\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.062257 4961 scope.go:117] "RemoveContainer" containerID="308116382cb27e818f070b7b056141397be7f285093c2ad4ff20f0102e6836e5"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.082324 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "494ff2b1-6bb3-4c8a-be81-02fe6f884caa" (UID: "494ff2b1-6bb3-4c8a-be81-02fe6f884caa"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.102057 4961 scope.go:117] "RemoveContainer" containerID="d993e9fbe949298b15d84342b1b58595a1705da6e498b29b5a95e7035465ac43"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.128097 4961 scope.go:117] "RemoveContainer" containerID="308116382cb27e818f070b7b056141397be7f285093c2ad4ff20f0102e6836e5"
Dec 05 17:55:03 crc kubenswrapper[4961]: E1205 17:55:03.129150 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"308116382cb27e818f070b7b056141397be7f285093c2ad4ff20f0102e6836e5\": container with ID starting with 308116382cb27e818f070b7b056141397be7f285093c2ad4ff20f0102e6836e5 not found: ID does not exist" containerID="308116382cb27e818f070b7b056141397be7f285093c2ad4ff20f0102e6836e5"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.129287 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"308116382cb27e818f070b7b056141397be7f285093c2ad4ff20f0102e6836e5"} err="failed to get container status \"308116382cb27e818f070b7b056141397be7f285093c2ad4ff20f0102e6836e5\": rpc error: code = NotFound desc = could not find container \"308116382cb27e818f070b7b056141397be7f285093c2ad4ff20f0102e6836e5\": container with ID starting with 308116382cb27e818f070b7b056141397be7f285093c2ad4ff20f0102e6836e5 not found: ID does not exist"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.129387 4961 scope.go:117] "RemoveContainer" containerID="d993e9fbe949298b15d84342b1b58595a1705da6e498b29b5a95e7035465ac43"
Dec 05 17:55:03 crc kubenswrapper[4961]: E1205 17:55:03.129724 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d993e9fbe949298b15d84342b1b58595a1705da6e498b29b5a95e7035465ac43\": container with ID starting with d993e9fbe949298b15d84342b1b58595a1705da6e498b29b5a95e7035465ac43 not found: ID does not exist" containerID="d993e9fbe949298b15d84342b1b58595a1705da6e498b29b5a95e7035465ac43"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.129744 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d993e9fbe949298b15d84342b1b58595a1705da6e498b29b5a95e7035465ac43"} err="failed to get container status \"d993e9fbe949298b15d84342b1b58595a1705da6e498b29b5a95e7035465ac43\": rpc error: code = NotFound desc = could not find container \"d993e9fbe949298b15d84342b1b58595a1705da6e498b29b5a95e7035465ac43\": container with ID starting with d993e9fbe949298b15d84342b1b58595a1705da6e498b29b5a95e7035465ac43 not found: ID does not exist"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.154546 4961 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/494ff2b1-6bb3-4c8a-be81-02fe6f884caa-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.218889 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.227804 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.249444 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 05 17:55:03 crc kubenswrapper[4961]: E1205 17:55:03.249939 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="494ff2b1-6bb3-4c8a-be81-02fe6f884caa" containerName="rabbitmq"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.249963 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="494ff2b1-6bb3-4c8a-be81-02fe6f884caa" containerName="rabbitmq"
Dec 05 17:55:03 crc kubenswrapper[4961]: E1205 17:55:03.249986 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="346da897-3e71-4d6f-b17d-fe5f905dd705" containerName="setup-container"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.249994 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="346da897-3e71-4d6f-b17d-fe5f905dd705" containerName="setup-container"
Dec 05 17:55:03 crc kubenswrapper[4961]: E1205 17:55:03.250009 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="346da897-3e71-4d6f-b17d-fe5f905dd705" containerName="rabbitmq"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.250017 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="346da897-3e71-4d6f-b17d-fe5f905dd705" containerName="rabbitmq"
Dec 05 17:55:03 crc kubenswrapper[4961]: E1205 17:55:03.250032 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="494ff2b1-6bb3-4c8a-be81-02fe6f884caa" containerName="setup-container"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.250040 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="494ff2b1-6bb3-4c8a-be81-02fe6f884caa" containerName="setup-container"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.250272 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="346da897-3e71-4d6f-b17d-fe5f905dd705" containerName="rabbitmq"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.250295 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="494ff2b1-6bb3-4c8a-be81-02fe6f884caa" containerName="rabbitmq"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.251668 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.255704 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-default-user"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.255978 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-server-dockercfg-rjwv6"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.256108 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-plugins-conf"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.256204 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-cell1-erlang-cookie"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.256357 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-cell1-svc"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.259250 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-config-data"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.259451 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-cell1-server-conf"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.268039 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"]
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.359084 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qwmsr\" (UniqueName: \"kubernetes.io/projected/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-kube-api-access-qwmsr\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.359141 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.359198 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.359240 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.359266 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.359360 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.359426 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.359555 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.359655 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.359792 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.359894 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.461662 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.461721 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.461766 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qwmsr\" (UniqueName: \"kubernetes.io/projected/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-kube-api-access-qwmsr\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.461870 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-rabbitmq-erlang-cookie\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.461909 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.461944 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.461964 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.461985 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.462008 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.462041 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.462066 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.462259 4961 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") device mount path \"/mnt/openstack/pv04\"" pod="openstack/rabbitmq-cell1-server-0"
Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.462586 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-rabbitmq-erlang-cookie\") pod 
\"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.462940 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-rabbitmq-plugins\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.462996 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-plugins-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.463431 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-server-conf\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.463689 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-config-data\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.467044 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-erlang-cookie-secret\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.467525 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-rabbitmq-confd\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.468174 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-rabbitmq-tls\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.468414 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-pod-info\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.486188 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qwmsr\" (UniqueName: \"kubernetes.io/projected/2deb3a6e-b9b0-4e6d-a755-286adb0a3975-kube-api-access-qwmsr\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.490317 4961 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"local-storage04-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage04-crc\") pod \"rabbitmq-cell1-server-0\" (UID: \"2deb3a6e-b9b0-4e6d-a755-286adb0a3975\") " pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.585948 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.862189 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-6tmxr"] Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.865084 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.869202 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-edpm-ipam" Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.885683 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-6tmxr"] Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.972098 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-dns-swift-storage-0\") pod \"dnsmasq-dns-5576978c7c-6tmxr\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") " pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.972151 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-ovsdbserver-nb\") pod \"dnsmasq-dns-5576978c7c-6tmxr\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") " pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.972175 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-87r4w\" (UniqueName: \"kubernetes.io/projected/a80aa0e2-eeda-42ca-bd4a-4739236c032c-kube-api-access-87r4w\") pod \"dnsmasq-dns-5576978c7c-6tmxr\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") " pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.972199 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-config\") pod \"dnsmasq-dns-5576978c7c-6tmxr\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") " pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.973292 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-openstack-edpm-ipam\") pod \"dnsmasq-dns-5576978c7c-6tmxr\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") " pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.975158 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-dns-svc\") pod \"dnsmasq-dns-5576978c7c-6tmxr\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") " 
pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" Dec 05 17:55:03 crc kubenswrapper[4961]: I1205 17:55:03.975200 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-ovsdbserver-sb\") pod \"dnsmasq-dns-5576978c7c-6tmxr\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") " pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" Dec 05 17:55:04 crc kubenswrapper[4961]: I1205 17:55:04.036271 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-cell1-server-0"] Dec 05 17:55:04 crc kubenswrapper[4961]: I1205 17:55:04.076981 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-dns-svc\") pod \"dnsmasq-dns-5576978c7c-6tmxr\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") " pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" Dec 05 17:55:04 crc kubenswrapper[4961]: I1205 17:55:04.077031 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-ovsdbserver-sb\") pod \"dnsmasq-dns-5576978c7c-6tmxr\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") " pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" Dec 05 17:55:04 crc kubenswrapper[4961]: I1205 17:55:04.077059 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-dns-swift-storage-0\") pod \"dnsmasq-dns-5576978c7c-6tmxr\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") " pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" Dec 05 17:55:04 crc kubenswrapper[4961]: I1205 17:55:04.077103 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-ovsdbserver-nb\") pod \"dnsmasq-dns-5576978c7c-6tmxr\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") " pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" Dec 05 17:55:04 crc kubenswrapper[4961]: I1205 17:55:04.077132 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-87r4w\" (UniqueName: \"kubernetes.io/projected/a80aa0e2-eeda-42ca-bd4a-4739236c032c-kube-api-access-87r4w\") pod \"dnsmasq-dns-5576978c7c-6tmxr\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") " pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" Dec 05 17:55:04 crc kubenswrapper[4961]: I1205 17:55:04.077160 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-config\") pod \"dnsmasq-dns-5576978c7c-6tmxr\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") " pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" Dec 05 17:55:04 crc kubenswrapper[4961]: I1205 17:55:04.077236 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-openstack-edpm-ipam\") pod \"dnsmasq-dns-5576978c7c-6tmxr\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") " pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" Dec 05 17:55:04 crc kubenswrapper[4961]: I1205 17:55:04.078001 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-dns-svc\") pod \"dnsmasq-dns-5576978c7c-6tmxr\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") " pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" Dec 05 17:55:04 crc kubenswrapper[4961]: I1205 17:55:04.078051 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-ovsdbserver-sb\") pod \"dnsmasq-dns-5576978c7c-6tmxr\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") " pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" Dec 05 17:55:04 crc kubenswrapper[4961]: I1205 17:55:04.078275 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-dns-swift-storage-0\") pod \"dnsmasq-dns-5576978c7c-6tmxr\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") " pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" Dec 05 17:55:04 crc kubenswrapper[4961]: I1205 17:55:04.078474 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-config\") pod \"dnsmasq-dns-5576978c7c-6tmxr\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") " pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" Dec 05 17:55:04 crc kubenswrapper[4961]: I1205 17:55:04.078806 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-ovsdbserver-nb\") pod \"dnsmasq-dns-5576978c7c-6tmxr\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") " pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" Dec 05 17:55:04 crc kubenswrapper[4961]: I1205 17:55:04.079310 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-openstack-edpm-ipam\") pod \"dnsmasq-dns-5576978c7c-6tmxr\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") " pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" Dec 05 17:55:04 crc kubenswrapper[4961]: I1205 17:55:04.098761 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-87r4w\" (UniqueName: \"kubernetes.io/projected/a80aa0e2-eeda-42ca-bd4a-4739236c032c-kube-api-access-87r4w\") pod \"dnsmasq-dns-5576978c7c-6tmxr\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") " pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" Dec 05 17:55:04 crc kubenswrapper[4961]: I1205 17:55:04.188387 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" Dec 05 17:55:04 crc kubenswrapper[4961]: I1205 17:55:04.640872 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-6tmxr"] Dec 05 17:55:04 crc kubenswrapper[4961]: W1205 17:55:04.645334 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda80aa0e2_eeda_42ca_bd4a_4739236c032c.slice/crio-35fce359e1defe1784af32156854fd15319a4a7cad7ff03f0c81e9bdaf75a684 WatchSource:0}: Error finding container 35fce359e1defe1784af32156854fd15319a4a7cad7ff03f0c81e9bdaf75a684: Status 404 returned error can't find the container with id 35fce359e1defe1784af32156854fd15319a4a7cad7ff03f0c81e9bdaf75a684 Dec 05 17:55:04 crc kubenswrapper[4961]: I1205 17:55:04.876994 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="494ff2b1-6bb3-4c8a-be81-02fe6f884caa" path="/var/lib/kubelet/pods/494ff2b1-6bb3-4c8a-be81-02fe6f884caa/volumes" Dec 05 17:55:04 crc kubenswrapper[4961]: I1205 17:55:04.908923 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2deb3a6e-b9b0-4e6d-a755-286adb0a3975","Type":"ContainerStarted","Data":"e6a9d7f29df245fe40bbdf5a8f1bb96d28a2faea2e2734ee936fae9a7261226c"} Dec 05 17:55:04 crc kubenswrapper[4961]: I1205 17:55:04.910204 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" event={"ID":"a80aa0e2-eeda-42ca-bd4a-4739236c032c","Type":"ContainerStarted","Data":"35fce359e1defe1784af32156854fd15319a4a7cad7ff03f0c81e9bdaf75a684"} Dec 05 17:55:05 crc kubenswrapper[4961]: E1205 17:55:05.417574 4961 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5656c8dd_d36b_48ae_a272_01f789cf280d.slice/crio-4a322e66ebde2cdac5c97aaf625096b61d0f839a02da4a4245a53807e87554f6\": RecentStats: unable to find data in memory cache]" Dec 05 17:55:05 crc kubenswrapper[4961]: I1205 17:55:05.921427 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2deb3a6e-b9b0-4e6d-a755-286adb0a3975","Type":"ContainerStarted","Data":"226a7ba9a5d5b49ace67f7fca61d98b073678827e387f79dd46c9b6eacd2765b"} Dec 05 17:55:05 crc kubenswrapper[4961]: I1205 17:55:05.924202 4961 generic.go:334] "Generic (PLEG): container finished" podID="a80aa0e2-eeda-42ca-bd4a-4739236c032c" containerID="ba1f1c24e3fe043078e9327a79083c6e618f8cfe821b5cd6a618f6d66d60e447" exitCode=0 Dec 05 17:55:05 crc kubenswrapper[4961]: I1205 17:55:05.924255 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" event={"ID":"a80aa0e2-eeda-42ca-bd4a-4739236c032c","Type":"ContainerDied","Data":"ba1f1c24e3fe043078e9327a79083c6e618f8cfe821b5cd6a618f6d66d60e447"} Dec 05 17:55:06 crc kubenswrapper[4961]: I1205 17:55:06.937309 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" event={"ID":"a80aa0e2-eeda-42ca-bd4a-4739236c032c","Type":"ContainerStarted","Data":"522b7ee841ceb250eb5412dd746525f93891ad36f647e56ef01f00a94bf265f8"} Dec 05 17:55:06 crc kubenswrapper[4961]: I1205 17:55:06.969157 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" podStartSLOduration=3.9691362420000003 podStartE2EDuration="3.969136242s" podCreationTimestamp="2025-12-05 17:55:03 +0000 
UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:55:06.961058292 +0000 UTC m=+1313.022208785" watchObservedRunningTime="2025-12-05 17:55:06.969136242 +0000 UTC m=+1313.030286715" Dec 05 17:55:07 crc kubenswrapper[4961]: I1205 17:55:07.946027 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.190128 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.260976 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"] Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.261543 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv" podUID="caec0c5b-2cc3-4fdb-9f16-8653966fca15" containerName="dnsmasq-dns" containerID="cri-o://c4fd39dd1bac45bae4d0dbeeeb2a1c9f57446abf2c230f71285d900a2ca0d6a8" gracePeriod=10 Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.433296 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/dnsmasq-dns-8c6f6df99-rw7cz"] Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.436160 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.470895 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8c6f6df99-rw7cz"] Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.478151 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1270f427-e53f-410f-b9ae-9cf12c5dffe1-config\") pod \"dnsmasq-dns-8c6f6df99-rw7cz\" (UID: \"1270f427-e53f-410f-b9ae-9cf12c5dffe1\") " pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.478236 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmwcv\" (UniqueName: \"kubernetes.io/projected/1270f427-e53f-410f-b9ae-9cf12c5dffe1-kube-api-access-bmwcv\") pod \"dnsmasq-dns-8c6f6df99-rw7cz\" (UID: \"1270f427-e53f-410f-b9ae-9cf12c5dffe1\") " pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.478281 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1270f427-e53f-410f-b9ae-9cf12c5dffe1-ovsdbserver-sb\") pod \"dnsmasq-dns-8c6f6df99-rw7cz\" (UID: \"1270f427-e53f-410f-b9ae-9cf12c5dffe1\") " pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.478301 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1270f427-e53f-410f-b9ae-9cf12c5dffe1-ovsdbserver-nb\") pod \"dnsmasq-dns-8c6f6df99-rw7cz\" (UID: \"1270f427-e53f-410f-b9ae-9cf12c5dffe1\") " pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.478323 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam\" (UniqueName: 
\"kubernetes.io/configmap/1270f427-e53f-410f-b9ae-9cf12c5dffe1-openstack-edpm-ipam\") pod \"dnsmasq-dns-8c6f6df99-rw7cz\" (UID: \"1270f427-e53f-410f-b9ae-9cf12c5dffe1\") " pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.478343 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1270f427-e53f-410f-b9ae-9cf12c5dffe1-dns-swift-storage-0\") pod \"dnsmasq-dns-8c6f6df99-rw7cz\" (UID: \"1270f427-e53f-410f-b9ae-9cf12c5dffe1\") " pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.478368 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1270f427-e53f-410f-b9ae-9cf12c5dffe1-dns-svc\") pod \"dnsmasq-dns-8c6f6df99-rw7cz\" (UID: \"1270f427-e53f-410f-b9ae-9cf12c5dffe1\") " pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.581553 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1270f427-e53f-410f-b9ae-9cf12c5dffe1-config\") pod \"dnsmasq-dns-8c6f6df99-rw7cz\" (UID: \"1270f427-e53f-410f-b9ae-9cf12c5dffe1\") " pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.581691 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmwcv\" (UniqueName: \"kubernetes.io/projected/1270f427-e53f-410f-b9ae-9cf12c5dffe1-kube-api-access-bmwcv\") pod \"dnsmasq-dns-8c6f6df99-rw7cz\" (UID: \"1270f427-e53f-410f-b9ae-9cf12c5dffe1\") " pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.581758 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1270f427-e53f-410f-b9ae-9cf12c5dffe1-ovsdbserver-sb\") pod \"dnsmasq-dns-8c6f6df99-rw7cz\" (UID: \"1270f427-e53f-410f-b9ae-9cf12c5dffe1\") " pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.581939 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1270f427-e53f-410f-b9ae-9cf12c5dffe1-ovsdbserver-nb\") pod \"dnsmasq-dns-8c6f6df99-rw7cz\" (UID: \"1270f427-e53f-410f-b9ae-9cf12c5dffe1\") " pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.582002 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/1270f427-e53f-410f-b9ae-9cf12c5dffe1-openstack-edpm-ipam\") pod \"dnsmasq-dns-8c6f6df99-rw7cz\" (UID: \"1270f427-e53f-410f-b9ae-9cf12c5dffe1\") " pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.582060 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1270f427-e53f-410f-b9ae-9cf12c5dffe1-dns-swift-storage-0\") pod \"dnsmasq-dns-8c6f6df99-rw7cz\" (UID: \"1270f427-e53f-410f-b9ae-9cf12c5dffe1\") " pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.582156 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"dns-svc\" (UniqueName: 
\"kubernetes.io/configmap/1270f427-e53f-410f-b9ae-9cf12c5dffe1-dns-svc\") pod \"dnsmasq-dns-8c6f6df99-rw7cz\" (UID: \"1270f427-e53f-410f-b9ae-9cf12c5dffe1\") " pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.582770 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1270f427-e53f-410f-b9ae-9cf12c5dffe1-config\") pod \"dnsmasq-dns-8c6f6df99-rw7cz\" (UID: \"1270f427-e53f-410f-b9ae-9cf12c5dffe1\") " pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.582974 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/1270f427-e53f-410f-b9ae-9cf12c5dffe1-ovsdbserver-nb\") pod \"dnsmasq-dns-8c6f6df99-rw7cz\" (UID: \"1270f427-e53f-410f-b9ae-9cf12c5dffe1\") " pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.583140 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/1270f427-e53f-410f-b9ae-9cf12c5dffe1-dns-swift-storage-0\") pod \"dnsmasq-dns-8c6f6df99-rw7cz\" (UID: \"1270f427-e53f-410f-b9ae-9cf12c5dffe1\") " pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.583262 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/1270f427-e53f-410f-b9ae-9cf12c5dffe1-openstack-edpm-ipam\") pod \"dnsmasq-dns-8c6f6df99-rw7cz\" (UID: \"1270f427-e53f-410f-b9ae-9cf12c5dffe1\") " pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.583278 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/1270f427-e53f-410f-b9ae-9cf12c5dffe1-ovsdbserver-sb\") pod \"dnsmasq-dns-8c6f6df99-rw7cz\" (UID: \"1270f427-e53f-410f-b9ae-9cf12c5dffe1\") " pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.583670 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/1270f427-e53f-410f-b9ae-9cf12c5dffe1-dns-svc\") pod \"dnsmasq-dns-8c6f6df99-rw7cz\" (UID: \"1270f427-e53f-410f-b9ae-9cf12c5dffe1\") " pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.607570 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bmwcv\" (UniqueName: \"kubernetes.io/projected/1270f427-e53f-410f-b9ae-9cf12c5dffe1-kube-api-access-bmwcv\") pod \"dnsmasq-dns-8c6f6df99-rw7cz\" (UID: \"1270f427-e53f-410f-b9ae-9cf12c5dffe1\") " pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.760324 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" Dec 05 17:55:14 crc kubenswrapper[4961]: I1205 17:55:14.975352 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv" Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.098175 4961 generic.go:334] "Generic (PLEG): container finished" podID="caec0c5b-2cc3-4fdb-9f16-8653966fca15" containerID="c4fd39dd1bac45bae4d0dbeeeb2a1c9f57446abf2c230f71285d900a2ca0d6a8" exitCode=0 Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.098496 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv" event={"ID":"caec0c5b-2cc3-4fdb-9f16-8653966fca15","Type":"ContainerDied","Data":"c4fd39dd1bac45bae4d0dbeeeb2a1c9f57446abf2c230f71285d900a2ca0d6a8"} Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.098522 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv" event={"ID":"caec0c5b-2cc3-4fdb-9f16-8653966fca15","Type":"ContainerDied","Data":"84a352c084cead6fa7fbb576e76ecbc954243be477a6c7ddc838743b12050a86"} Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.098538 4961 scope.go:117] "RemoveContainer" containerID="c4fd39dd1bac45bae4d0dbeeeb2a1c9f57446abf2c230f71285d900a2ca0d6a8" Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.098723 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5c7b6c5df9-kzlzv" Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.122308 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-dns-svc\") pod \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\" (UID: \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\") " Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.122398 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-config\") pod \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\" (UID: \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\") " Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.122545 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-ovsdbserver-nb\") pod \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\" (UID: \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\") " Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.122576 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-dns-swift-storage-0\") pod \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\" (UID: \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\") " Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.122709 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-ovsdbserver-sb\") pod \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\" (UID: \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\") " Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.122808 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cd29z\" (UniqueName: \"kubernetes.io/projected/caec0c5b-2cc3-4fdb-9f16-8653966fca15-kube-api-access-cd29z\") pod \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\" (UID: \"caec0c5b-2cc3-4fdb-9f16-8653966fca15\") " Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.131084 4961 operation_generator.go:803] 
Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.161824 4961 scope.go:117] "RemoveContainer" containerID="dfbd4274978d1a405efcc01b365fb7eebc3baf7c12a5ed018d931ce590fe5059"
Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.194591 4961 scope.go:117] "RemoveContainer" containerID="c4fd39dd1bac45bae4d0dbeeeb2a1c9f57446abf2c230f71285d900a2ca0d6a8"
Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.195251 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "caec0c5b-2cc3-4fdb-9f16-8653966fca15" (UID: "caec0c5b-2cc3-4fdb-9f16-8653966fca15"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:55:15 crc kubenswrapper[4961]: E1205 17:55:15.195910 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c4fd39dd1bac45bae4d0dbeeeb2a1c9f57446abf2c230f71285d900a2ca0d6a8\": container with ID starting with c4fd39dd1bac45bae4d0dbeeeb2a1c9f57446abf2c230f71285d900a2ca0d6a8 not found: ID does not exist" containerID="c4fd39dd1bac45bae4d0dbeeeb2a1c9f57446abf2c230f71285d900a2ca0d6a8"
Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.195948 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c4fd39dd1bac45bae4d0dbeeeb2a1c9f57446abf2c230f71285d900a2ca0d6a8"} err="failed to get container status \"c4fd39dd1bac45bae4d0dbeeeb2a1c9f57446abf2c230f71285d900a2ca0d6a8\": rpc error: code = NotFound desc = could not find container \"c4fd39dd1bac45bae4d0dbeeeb2a1c9f57446abf2c230f71285d900a2ca0d6a8\": container with ID starting with c4fd39dd1bac45bae4d0dbeeeb2a1c9f57446abf2c230f71285d900a2ca0d6a8 not found: ID does not exist"
Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.195987 4961 scope.go:117] "RemoveContainer" containerID="dfbd4274978d1a405efcc01b365fb7eebc3baf7c12a5ed018d931ce590fe5059"
Dec 05 17:55:15 crc kubenswrapper[4961]: E1205 17:55:15.196389 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dfbd4274978d1a405efcc01b365fb7eebc3baf7c12a5ed018d931ce590fe5059\": container with ID starting with dfbd4274978d1a405efcc01b365fb7eebc3baf7c12a5ed018d931ce590fe5059 not found: ID does not exist" containerID="dfbd4274978d1a405efcc01b365fb7eebc3baf7c12a5ed018d931ce590fe5059"
Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.196409 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dfbd4274978d1a405efcc01b365fb7eebc3baf7c12a5ed018d931ce590fe5059"} err="failed to get container status \"dfbd4274978d1a405efcc01b365fb7eebc3baf7c12a5ed018d931ce590fe5059\": rpc error: code = NotFound desc = could not find container \"dfbd4274978d1a405efcc01b365fb7eebc3baf7c12a5ed018d931ce590fe5059\": container with ID starting with dfbd4274978d1a405efcc01b365fb7eebc3baf7c12a5ed018d931ce590fe5059 not found: ID does not exist"
Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.210455 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "caec0c5b-2cc3-4fdb-9f16-8653966fca15" (UID: "caec0c5b-2cc3-4fdb-9f16-8653966fca15"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.212248 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "caec0c5b-2cc3-4fdb-9f16-8653966fca15" (UID: "caec0c5b-2cc3-4fdb-9f16-8653966fca15"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.212263 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "caec0c5b-2cc3-4fdb-9f16-8653966fca15" (UID: "caec0c5b-2cc3-4fdb-9f16-8653966fca15"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.213087 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-config" (OuterVolumeSpecName: "config") pod "caec0c5b-2cc3-4fdb-9f16-8653966fca15" (UID: "caec0c5b-2cc3-4fdb-9f16-8653966fca15"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.229940 4961 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.229970 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-config\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.229981 4961 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.229992 4961 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.230001 4961 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/caec0c5b-2cc3-4fdb-9f16-8653966fca15-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.230009 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cd29z\" (UniqueName: \"kubernetes.io/projected/caec0c5b-2cc3-4fdb-9f16-8653966fca15-kube-api-access-cd29z\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:15 crc kubenswrapper[4961]: W1205 17:55:15.472949 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1270f427_e53f_410f_b9ae_9cf12c5dffe1.slice/crio-65b92bedb5105cbc891762ddd280035917485d7e07ae6e082ef707ee4e6e8305 WatchSource:0}: Error finding container 65b92bedb5105cbc891762ddd280035917485d7e07ae6e082ef707ee4e6e8305: Status 404 returned error can't find the container with id 65b92bedb5105cbc891762ddd280035917485d7e07ae6e082ef707ee4e6e8305
Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.481453 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/dnsmasq-dns-8c6f6df99-rw7cz"]
Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.490758 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"]
Dec 05 17:55:15 crc kubenswrapper[4961]: I1205 17:55:15.501924 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5c7b6c5df9-kzlzv"]
Dec 05 17:55:16 crc kubenswrapper[4961]: I1205 17:55:16.109039 4961 generic.go:334] "Generic (PLEG): container finished" podID="1270f427-e53f-410f-b9ae-9cf12c5dffe1" containerID="8f886bb438d46b4dca2f76f1a6213ee10e07444a41e42249aed32bce19abd131" exitCode=0
Dec 05 17:55:16 crc kubenswrapper[4961]: I1205 17:55:16.109441 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" event={"ID":"1270f427-e53f-410f-b9ae-9cf12c5dffe1","Type":"ContainerDied","Data":"8f886bb438d46b4dca2f76f1a6213ee10e07444a41e42249aed32bce19abd131"}
Dec 05 17:55:16 crc kubenswrapper[4961]: I1205 17:55:16.109482 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" event={"ID":"1270f427-e53f-410f-b9ae-9cf12c5dffe1","Type":"ContainerStarted","Data":"65b92bedb5105cbc891762ddd280035917485d7e07ae6e082ef707ee4e6e8305"}
Dec 05 17:55:16 crc kubenswrapper[4961]: I1205 17:55:16.874930 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="caec0c5b-2cc3-4fdb-9f16-8653966fca15" path="/var/lib/kubelet/pods/caec0c5b-2cc3-4fdb-9f16-8653966fca15/volumes"
Dec 05 17:55:17 crc kubenswrapper[4961]: I1205 17:55:17.123339 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" event={"ID":"1270f427-e53f-410f-b9ae-9cf12c5dffe1","Type":"ContainerStarted","Data":"9c99c56f759a2ae780dd21d9663810d17248d111bd872bc60c4afcfa6ce9419e"}
Dec 05 17:55:17 crc kubenswrapper[4961]: I1205 17:55:17.123593 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz"
Dec 05 17:55:17 crc kubenswrapper[4961]: I1205 17:55:17.154678 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz" podStartSLOduration=3.154657899 podStartE2EDuration="3.154657899s" podCreationTimestamp="2025-12-05 17:55:14 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:55:17.151355068 +0000 UTC m=+1323.212505551" watchObservedRunningTime="2025-12-05 17:55:17.154657899 +0000 UTC m=+1323.215808372"
Dec 05 17:55:24 crc kubenswrapper[4961]: I1205 17:55:24.762834 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/dnsmasq-dns-8c6f6df99-rw7cz"
Dec 05 17:55:24 crc kubenswrapper[4961]: I1205 17:55:24.836187 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-6tmxr"]
Dec 05 17:55:24 crc kubenswrapper[4961]: I1205 17:55:24.836512 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" podUID="a80aa0e2-eeda-42ca-bd4a-4739236c032c" containerName="dnsmasq-dns" containerID="cri-o://522b7ee841ceb250eb5412dd746525f93891ad36f647e56ef01f00a94bf265f8" gracePeriod=10
Dec 05 17:55:25 crc kubenswrapper[4961]: I1205 17:55:25.228598 4961 generic.go:334] "Generic (PLEG): container finished" podID="a80aa0e2-eeda-42ca-bd4a-4739236c032c" containerID="522b7ee841ceb250eb5412dd746525f93891ad36f647e56ef01f00a94bf265f8" exitCode=0
Dec 05 17:55:25 crc kubenswrapper[4961]: I1205 17:55:25.228869 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" event={"ID":"a80aa0e2-eeda-42ca-bd4a-4739236c032c","Type":"ContainerDied","Data":"522b7ee841ceb250eb5412dd746525f93891ad36f647e56ef01f00a94bf265f8"}
Dec 05 17:55:25 crc kubenswrapper[4961]: I1205 17:55:25.364006 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-6tmxr"
Dec 05 17:55:25 crc kubenswrapper[4961]: I1205 17:55:25.450665 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-ovsdbserver-nb\") pod \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") "
Dec 05 17:55:25 crc kubenswrapper[4961]: I1205 17:55:25.450792 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-dns-swift-storage-0\") pod \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") "
Dec 05 17:55:25 crc kubenswrapper[4961]: I1205 17:55:25.450861 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-config\") pod \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") "
Dec 05 17:55:25 crc kubenswrapper[4961]: I1205 17:55:25.450883 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-87r4w\" (UniqueName: \"kubernetes.io/projected/a80aa0e2-eeda-42ca-bd4a-4739236c032c-kube-api-access-87r4w\") pod \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") "
Dec 05 17:55:25 crc kubenswrapper[4961]: I1205 17:55:25.450947 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-dns-svc\") pod \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") "
Dec 05 17:55:25 crc kubenswrapper[4961]: I1205 17:55:25.450976 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-ovsdbserver-sb\") pod \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") "
Dec 05 17:55:25 crc kubenswrapper[4961]: I1205 17:55:25.451072 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-openstack-edpm-ipam\") pod \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\" (UID: \"a80aa0e2-eeda-42ca-bd4a-4739236c032c\") "
Dec 05 17:55:25 crc kubenswrapper[4961]: I1205 17:55:25.460912 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a80aa0e2-eeda-42ca-bd4a-4739236c032c-kube-api-access-87r4w" (OuterVolumeSpecName: "kube-api-access-87r4w") pod "a80aa0e2-eeda-42ca-bd4a-4739236c032c" (UID: "a80aa0e2-eeda-42ca-bd4a-4739236c032c"). InnerVolumeSpecName "kube-api-access-87r4w". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:55:25 crc kubenswrapper[4961]: I1205 17:55:25.507761 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-openstack-edpm-ipam" (OuterVolumeSpecName: "openstack-edpm-ipam") pod "a80aa0e2-eeda-42ca-bd4a-4739236c032c" (UID: "a80aa0e2-eeda-42ca-bd4a-4739236c032c"). InnerVolumeSpecName "openstack-edpm-ipam". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:55:25 crc kubenswrapper[4961]: I1205 17:55:25.514416 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-dns-swift-storage-0" (OuterVolumeSpecName: "dns-swift-storage-0") pod "a80aa0e2-eeda-42ca-bd4a-4739236c032c" (UID: "a80aa0e2-eeda-42ca-bd4a-4739236c032c"). InnerVolumeSpecName "dns-swift-storage-0". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:55:25 crc kubenswrapper[4961]: I1205 17:55:25.539405 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-ovsdbserver-nb" (OuterVolumeSpecName: "ovsdbserver-nb") pod "a80aa0e2-eeda-42ca-bd4a-4739236c032c" (UID: "a80aa0e2-eeda-42ca-bd4a-4739236c032c"). InnerVolumeSpecName "ovsdbserver-nb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:55:25 crc kubenswrapper[4961]: I1205 17:55:25.548681 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-ovsdbserver-sb" (OuterVolumeSpecName: "ovsdbserver-sb") pod "a80aa0e2-eeda-42ca-bd4a-4739236c032c" (UID: "a80aa0e2-eeda-42ca-bd4a-4739236c032c"). InnerVolumeSpecName "ovsdbserver-sb". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:55:25 crc kubenswrapper[4961]: I1205 17:55:25.551748 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-dns-svc" (OuterVolumeSpecName: "dns-svc") pod "a80aa0e2-eeda-42ca-bd4a-4739236c032c" (UID: "a80aa0e2-eeda-42ca-bd4a-4739236c032c"). InnerVolumeSpecName "dns-svc". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:55:25 crc kubenswrapper[4961]: I1205 17:55:25.553293 4961 reconciler_common.go:293] "Volume detached for volume \"dns-svc\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-dns-svc\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:25 crc kubenswrapper[4961]: I1205 17:55:25.553362 4961 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-sb\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-ovsdbserver-sb\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:25 crc kubenswrapper[4961]: I1205 17:55:25.553378 4961 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:25 crc kubenswrapper[4961]: I1205 17:55:25.553391 4961 reconciler_common.go:293] "Volume detached for volume \"ovsdbserver-nb\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-ovsdbserver-nb\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:25 crc kubenswrapper[4961]: I1205 17:55:25.553402 4961 reconciler_common.go:293] "Volume detached for volume \"dns-swift-storage-0\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-dns-swift-storage-0\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:25 crc kubenswrapper[4961]: I1205 17:55:25.553413 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-87r4w\" (UniqueName: \"kubernetes.io/projected/a80aa0e2-eeda-42ca-bd4a-4739236c032c-kube-api-access-87r4w\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:25 crc kubenswrapper[4961]: I1205 17:55:25.556748 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-config" (OuterVolumeSpecName: "config") pod "a80aa0e2-eeda-42ca-bd4a-4739236c032c" (UID: "a80aa0e2-eeda-42ca-bd4a-4739236c032c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 17:55:25 crc kubenswrapper[4961]: I1205 17:55:25.660246 4961 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a80aa0e2-eeda-42ca-bd4a-4739236c032c-config\") on node \"crc\" DevicePath \"\""
Dec 05 17:55:26 crc kubenswrapper[4961]: I1205 17:55:26.240387 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/dnsmasq-dns-5576978c7c-6tmxr" event={"ID":"a80aa0e2-eeda-42ca-bd4a-4739236c032c","Type":"ContainerDied","Data":"35fce359e1defe1784af32156854fd15319a4a7cad7ff03f0c81e9bdaf75a684"}
Dec 05 17:55:26 crc kubenswrapper[4961]: I1205 17:55:26.240742 4961 scope.go:117] "RemoveContainer" containerID="522b7ee841ceb250eb5412dd746525f93891ad36f647e56ef01f00a94bf265f8"
Dec 05 17:55:26 crc kubenswrapper[4961]: I1205 17:55:26.240488 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/dnsmasq-dns-5576978c7c-6tmxr"
Dec 05 17:55:26 crc kubenswrapper[4961]: I1205 17:55:26.285187 4961 scope.go:117] "RemoveContainer" containerID="ba1f1c24e3fe043078e9327a79083c6e618f8cfe821b5cd6a618f6d66d60e447"
Dec 05 17:55:26 crc kubenswrapper[4961]: I1205 17:55:26.288307 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-6tmxr"]
Dec 05 17:55:26 crc kubenswrapper[4961]: I1205 17:55:26.297038 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/dnsmasq-dns-5576978c7c-6tmxr"]
Dec 05 17:55:26 crc kubenswrapper[4961]: I1205 17:55:26.875185 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a80aa0e2-eeda-42ca-bd4a-4739236c032c" path="/var/lib/kubelet/pods/a80aa0e2-eeda-42ca-bd4a-4739236c032c/volumes"
Dec 05 17:55:27 crc kubenswrapper[4961]: I1205 17:55:27.246176 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 17:55:27 crc kubenswrapper[4961]: I1205 17:55:27.246228 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 17:55:32 crc kubenswrapper[4961]: I1205 17:55:32.967149 4961 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","burstable","pod346da897-3e71-4d6f-b17d-fe5f905dd705"] err="unable to destroy cgroup paths for cgroup [kubepods burstable pod346da897-3e71-4d6f-b17d-fe5f905dd705] : Timed out while waiting for systemd to remove kubepods-burstable-pod346da897_3e71_4d6f_b17d_fe5f905dd705.slice"
Dec 05 17:55:32 crc kubenswrapper[4961]: E1205 17:55:32.967760 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods burstable pod346da897-3e71-4d6f-b17d-fe5f905dd705] : unable to destroy cgroup paths for cgroup [kubepods burstable pod346da897-3e71-4d6f-b17d-fe5f905dd705] : Timed out while waiting for systemd to remove kubepods-burstable-pod346da897_3e71_4d6f_b17d_fe5f905dd705.slice" pod="openstack/rabbitmq-server-0" podUID="346da897-3e71-4d6f-b17d-fe5f905dd705"
Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.313188 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/rabbitmq-server-0"
Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.342048 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.351399 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.368036 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/rabbitmq-server-0"]
Dec 05 17:55:33 crc kubenswrapper[4961]: E1205 17:55:33.368501 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="caec0c5b-2cc3-4fdb-9f16-8653966fca15" containerName="dnsmasq-dns"
Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.368525 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="caec0c5b-2cc3-4fdb-9f16-8653966fca15" containerName="dnsmasq-dns"
Dec 05 17:55:33 crc kubenswrapper[4961]: E1205 17:55:33.368550 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a80aa0e2-eeda-42ca-bd4a-4739236c032c" containerName="dnsmasq-dns"
Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.368558 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="a80aa0e2-eeda-42ca-bd4a-4739236c032c" containerName="dnsmasq-dns"
Dec 05 17:55:33 crc kubenswrapper[4961]: E1205 17:55:33.368587 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="caec0c5b-2cc3-4fdb-9f16-8653966fca15" containerName="init"
Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.368595 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="caec0c5b-2cc3-4fdb-9f16-8653966fca15" containerName="init"
Dec 05 17:55:33 crc kubenswrapper[4961]: E1205 17:55:33.368614 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a80aa0e2-eeda-42ca-bd4a-4739236c032c" containerName="init"
Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.368623 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="a80aa0e2-eeda-42ca-bd4a-4739236c032c" containerName="init"
Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.368860 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="caec0c5b-2cc3-4fdb-9f16-8653966fca15" containerName="dnsmasq-dns"
Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.368890 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="a80aa0e2-eeda-42ca-bd4a-4739236c032c" containerName="dnsmasq-dns"
Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.369946 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.373011 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-config-data" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.373354 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-erlang-cookie" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.373591 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-plugins-conf" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.373989 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"rabbitmq-server-conf" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.374209 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-default-user" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.374379 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"cert-rabbitmq-svc" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.375449 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"rabbitmq-server-dockercfg-fh7kv" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.379514 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.420394 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c21abdd4-f06b-4865-8880-0603525e1cb1-config-data\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.420459 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7d49\" (UniqueName: \"kubernetes.io/projected/c21abdd4-f06b-4865-8880-0603525e1cb1-kube-api-access-r7d49\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.420537 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c21abdd4-f06b-4865-8880-0603525e1cb1-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.420576 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.420602 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c21abdd4-f06b-4865-8880-0603525e1cb1-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.420623 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-tls\" (UniqueName: 
\"kubernetes.io/projected/c21abdd4-f06b-4865-8880-0603525e1cb1-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.420706 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c21abdd4-f06b-4865-8880-0603525e1cb1-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.420838 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c21abdd4-f06b-4865-8880-0603525e1cb1-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.420918 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c21abdd4-f06b-4865-8880-0603525e1cb1-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.420959 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c21abdd4-f06b-4865-8880-0603525e1cb1-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.420992 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c21abdd4-f06b-4865-8880-0603525e1cb1-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.522492 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c21abdd4-f06b-4865-8880-0603525e1cb1-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.522560 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.522592 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c21abdd4-f06b-4865-8880-0603525e1cb1-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.522618 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c21abdd4-f06b-4865-8880-0603525e1cb1-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " 
pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.522665 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c21abdd4-f06b-4865-8880-0603525e1cb1-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.522703 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c21abdd4-f06b-4865-8880-0603525e1cb1-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.522757 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c21abdd4-f06b-4865-8880-0603525e1cb1-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.522826 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c21abdd4-f06b-4865-8880-0603525e1cb1-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.522856 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/c21abdd4-f06b-4865-8880-0603525e1cb1-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.522913 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c21abdd4-f06b-4865-8880-0603525e1cb1-config-data\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.522945 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7d49\" (UniqueName: \"kubernetes.io/projected/c21abdd4-f06b-4865-8880-0603525e1cb1-kube-api-access-r7d49\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.522950 4961 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") device mount path \"/mnt/openstack/pv03\"" pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.523252 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/c21abdd4-f06b-4865-8880-0603525e1cb1-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.523606 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: 
\"kubernetes.io/empty-dir/c21abdd4-f06b-4865-8880-0603525e1cb1-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.523897 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/c21abdd4-f06b-4865-8880-0603525e1cb1-config-data\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.524038 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/c21abdd4-f06b-4865-8880-0603525e1cb1-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.524227 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"server-conf\" (UniqueName: \"kubernetes.io/configmap/c21abdd4-f06b-4865-8880-0603525e1cb1-server-conf\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.528875 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/c21abdd4-f06b-4865-8880-0603525e1cb1-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.529657 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/c21abdd4-f06b-4865-8880-0603525e1cb1-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.529718 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-tls\" (UniqueName: \"kubernetes.io/projected/c21abdd4-f06b-4865-8880-0603525e1cb1-rabbitmq-tls\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.531808 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/c21abdd4-f06b-4865-8880-0603525e1cb1-pod-info\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.546899 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7d49\" (UniqueName: \"kubernetes.io/projected/c21abdd4-f06b-4865-8880-0603525e1cb1-kube-api-access-r7d49\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.578623 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"rabbitmq-server-0\" (UID: \"c21abdd4-f06b-4865-8880-0603525e1cb1\") " pod="openstack/rabbitmq-server-0" Dec 05 17:55:33 crc kubenswrapper[4961]: I1205 17:55:33.700898 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/rabbitmq-server-0" Dec 05 17:55:34 crc kubenswrapper[4961]: I1205 17:55:34.155796 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/rabbitmq-server-0"] Dec 05 17:55:34 crc kubenswrapper[4961]: I1205 17:55:34.322521 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c21abdd4-f06b-4865-8880-0603525e1cb1","Type":"ContainerStarted","Data":"d6a47f6d4452082250d8b65974f9c995dada7c9571bd08eae961b2f0f955da39"} Dec 05 17:55:34 crc kubenswrapper[4961]: I1205 17:55:34.886304 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="346da897-3e71-4d6f-b17d-fe5f905dd705" path="/var/lib/kubelet/pods/346da897-3e71-4d6f-b17d-fe5f905dd705/volumes" Dec 05 17:55:36 crc kubenswrapper[4961]: I1205 17:55:36.342732 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c21abdd4-f06b-4865-8880-0603525e1cb1","Type":"ContainerStarted","Data":"3020da6f7d982c36d2173fc9ce0fd1b1823bf8cfb437ee22a96db13835e8265b"} Dec 05 17:55:38 crc kubenswrapper[4961]: I1205 17:55:38.114634 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8"] Dec 05 17:55:38 crc kubenswrapper[4961]: I1205 17:55:38.117351 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8" Dec 05 17:55:38 crc kubenswrapper[4961]: I1205 17:55:38.130960 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rhbf4" Dec 05 17:55:38 crc kubenswrapper[4961]: I1205 17:55:38.131256 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 17:55:38 crc kubenswrapper[4961]: I1205 17:55:38.131440 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 17:55:38 crc kubenswrapper[4961]: I1205 17:55:38.131609 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 17:55:38 crc kubenswrapper[4961]: I1205 17:55:38.158888 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8"] Dec 05 17:55:38 crc kubenswrapper[4961]: I1205 17:55:38.210953 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1c42b2d0-4525-4847-a505-a625b88765b9-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8\" (UID: \"1c42b2d0-4525-4847-a505-a625b88765b9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8" Dec 05 17:55:38 crc kubenswrapper[4961]: I1205 17:55:38.211809 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1c42b2d0-4525-4847-a505-a625b88765b9-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8\" (UID: \"1c42b2d0-4525-4847-a505-a625b88765b9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8" Dec 05 17:55:38 crc kubenswrapper[4961]: I1205 17:55:38.212065 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c42b2d0-4525-4847-a505-a625b88765b9-repo-setup-combined-ca-bundle\") 
pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8\" (UID: \"1c42b2d0-4525-4847-a505-a625b88765b9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8" Dec 05 17:55:38 crc kubenswrapper[4961]: I1205 17:55:38.212348 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w79n6\" (UniqueName: \"kubernetes.io/projected/1c42b2d0-4525-4847-a505-a625b88765b9-kube-api-access-w79n6\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8\" (UID: \"1c42b2d0-4525-4847-a505-a625b88765b9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8" Dec 05 17:55:38 crc kubenswrapper[4961]: I1205 17:55:38.314813 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1c42b2d0-4525-4847-a505-a625b88765b9-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8\" (UID: \"1c42b2d0-4525-4847-a505-a625b88765b9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8" Dec 05 17:55:38 crc kubenswrapper[4961]: I1205 17:55:38.314906 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c42b2d0-4525-4847-a505-a625b88765b9-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8\" (UID: \"1c42b2d0-4525-4847-a505-a625b88765b9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8" Dec 05 17:55:38 crc kubenswrapper[4961]: I1205 17:55:38.314963 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w79n6\" (UniqueName: \"kubernetes.io/projected/1c42b2d0-4525-4847-a505-a625b88765b9-kube-api-access-w79n6\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8\" (UID: \"1c42b2d0-4525-4847-a505-a625b88765b9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8" Dec 05 17:55:38 crc kubenswrapper[4961]: I1205 17:55:38.315018 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1c42b2d0-4525-4847-a505-a625b88765b9-ssh-key\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8\" (UID: \"1c42b2d0-4525-4847-a505-a625b88765b9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8" Dec 05 17:55:38 crc kubenswrapper[4961]: I1205 17:55:38.323346 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c42b2d0-4525-4847-a505-a625b88765b9-repo-setup-combined-ca-bundle\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8\" (UID: \"1c42b2d0-4525-4847-a505-a625b88765b9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8" Dec 05 17:55:38 crc kubenswrapper[4961]: I1205 17:55:38.323408 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1c42b2d0-4525-4847-a505-a625b88765b9-inventory\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8\" (UID: \"1c42b2d0-4525-4847-a505-a625b88765b9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8" Dec 05 17:55:38 crc kubenswrapper[4961]: I1205 17:55:38.326411 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1c42b2d0-4525-4847-a505-a625b88765b9-ssh-key\") pod 
\"repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8\" (UID: \"1c42b2d0-4525-4847-a505-a625b88765b9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8" Dec 05 17:55:38 crc kubenswrapper[4961]: I1205 17:55:38.332371 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w79n6\" (UniqueName: \"kubernetes.io/projected/1c42b2d0-4525-4847-a505-a625b88765b9-kube-api-access-w79n6\") pod \"repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8\" (UID: \"1c42b2d0-4525-4847-a505-a625b88765b9\") " pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8" Dec 05 17:55:38 crc kubenswrapper[4961]: I1205 17:55:38.369892 4961 generic.go:334] "Generic (PLEG): container finished" podID="2deb3a6e-b9b0-4e6d-a755-286adb0a3975" containerID="226a7ba9a5d5b49ace67f7fca61d98b073678827e387f79dd46c9b6eacd2765b" exitCode=0 Dec 05 17:55:38 crc kubenswrapper[4961]: I1205 17:55:38.369964 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2deb3a6e-b9b0-4e6d-a755-286adb0a3975","Type":"ContainerDied","Data":"226a7ba9a5d5b49ace67f7fca61d98b073678827e387f79dd46c9b6eacd2765b"} Dec 05 17:55:38 crc kubenswrapper[4961]: I1205 17:55:38.456637 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8" Dec 05 17:55:39 crc kubenswrapper[4961]: I1205 17:55:39.031712 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8"] Dec 05 17:55:39 crc kubenswrapper[4961]: I1205 17:55:39.392652 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-cell1-server-0" event={"ID":"2deb3a6e-b9b0-4e6d-a755-286adb0a3975","Type":"ContainerStarted","Data":"d07154bb4291265eb4508d66910b78f6b5d3e41718f06428ccfcaf447a6dd3f6"} Dec 05 17:55:39 crc kubenswrapper[4961]: I1205 17:55:39.393183 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:55:39 crc kubenswrapper[4961]: I1205 17:55:39.398393 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8" event={"ID":"1c42b2d0-4525-4847-a505-a625b88765b9","Type":"ContainerStarted","Data":"99fa522fd657a89abc7b4535e86eacdf2d029c77dca9a3881ff71f3b9f671f9c"} Dec 05 17:55:39 crc kubenswrapper[4961]: I1205 17:55:39.433055 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-cell1-server-0" podStartSLOduration=36.433026941 podStartE2EDuration="36.433026941s" podCreationTimestamp="2025-12-05 17:55:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:55:39.422410968 +0000 UTC m=+1345.483561461" watchObservedRunningTime="2025-12-05 17:55:39.433026941 +0000 UTC m=+1345.494177414" Dec 05 17:55:49 crc kubenswrapper[4961]: I1205 17:55:49.539709 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8" event={"ID":"1c42b2d0-4525-4847-a505-a625b88765b9","Type":"ContainerStarted","Data":"5377c577df47250263f9170fc1b9f487e26062f0b7541e8b03a019692f457388"} Dec 05 17:55:49 crc kubenswrapper[4961]: I1205 17:55:49.576514 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8" podStartSLOduration=1.657161023 
podStartE2EDuration="11.576491058s" podCreationTimestamp="2025-12-05 17:55:38 +0000 UTC" firstStartedPulling="2025-12-05 17:55:39.040509031 +0000 UTC m=+1345.101659504" lastFinishedPulling="2025-12-05 17:55:48.959839066 +0000 UTC m=+1355.020989539" observedRunningTime="2025-12-05 17:55:49.569229098 +0000 UTC m=+1355.630379601" watchObservedRunningTime="2025-12-05 17:55:49.576491058 +0000 UTC m=+1355.637641551" Dec 05 17:55:53 crc kubenswrapper[4961]: I1205 17:55:53.589981 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-cell1-server-0" Dec 05 17:55:57 crc kubenswrapper[4961]: I1205 17:55:57.245927 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:55:57 crc kubenswrapper[4961]: I1205 17:55:57.246175 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:56:00 crc kubenswrapper[4961]: I1205 17:56:00.650469 4961 generic.go:334] "Generic (PLEG): container finished" podID="1c42b2d0-4525-4847-a505-a625b88765b9" containerID="5377c577df47250263f9170fc1b9f487e26062f0b7541e8b03a019692f457388" exitCode=0 Dec 05 17:56:00 crc kubenswrapper[4961]: I1205 17:56:00.651326 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8" event={"ID":"1c42b2d0-4525-4847-a505-a625b88765b9","Type":"ContainerDied","Data":"5377c577df47250263f9170fc1b9f487e26062f0b7541e8b03a019692f457388"} Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.071579 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.165391 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c42b2d0-4525-4847-a505-a625b88765b9-repo-setup-combined-ca-bundle\") pod \"1c42b2d0-4525-4847-a505-a625b88765b9\" (UID: \"1c42b2d0-4525-4847-a505-a625b88765b9\") " Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.165484 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1c42b2d0-4525-4847-a505-a625b88765b9-ssh-key\") pod \"1c42b2d0-4525-4847-a505-a625b88765b9\" (UID: \"1c42b2d0-4525-4847-a505-a625b88765b9\") " Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.165732 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1c42b2d0-4525-4847-a505-a625b88765b9-inventory\") pod \"1c42b2d0-4525-4847-a505-a625b88765b9\" (UID: \"1c42b2d0-4525-4847-a505-a625b88765b9\") " Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.165752 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w79n6\" (UniqueName: \"kubernetes.io/projected/1c42b2d0-4525-4847-a505-a625b88765b9-kube-api-access-w79n6\") pod \"1c42b2d0-4525-4847-a505-a625b88765b9\" (UID: \"1c42b2d0-4525-4847-a505-a625b88765b9\") " Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.171430 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c42b2d0-4525-4847-a505-a625b88765b9-kube-api-access-w79n6" (OuterVolumeSpecName: "kube-api-access-w79n6") pod "1c42b2d0-4525-4847-a505-a625b88765b9" (UID: "1c42b2d0-4525-4847-a505-a625b88765b9"). InnerVolumeSpecName "kube-api-access-w79n6". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.172023 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c42b2d0-4525-4847-a505-a625b88765b9-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "1c42b2d0-4525-4847-a505-a625b88765b9" (UID: "1c42b2d0-4525-4847-a505-a625b88765b9"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.196930 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c42b2d0-4525-4847-a505-a625b88765b9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "1c42b2d0-4525-4847-a505-a625b88765b9" (UID: "1c42b2d0-4525-4847-a505-a625b88765b9"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.202179 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1c42b2d0-4525-4847-a505-a625b88765b9-inventory" (OuterVolumeSpecName: "inventory") pod "1c42b2d0-4525-4847-a505-a625b88765b9" (UID: "1c42b2d0-4525-4847-a505-a625b88765b9"). InnerVolumeSpecName "inventory". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.267704 4961 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/1c42b2d0-4525-4847-a505-a625b88765b9-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.267749 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w79n6\" (UniqueName: \"kubernetes.io/projected/1c42b2d0-4525-4847-a505-a625b88765b9-kube-api-access-w79n6\") on node \"crc\" DevicePath \"\"" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.267769 4961 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1c42b2d0-4525-4847-a505-a625b88765b9-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.267797 4961 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/1c42b2d0-4525-4847-a505-a625b88765b9-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.670252 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8" event={"ID":"1c42b2d0-4525-4847-a505-a625b88765b9","Type":"ContainerDied","Data":"99fa522fd657a89abc7b4535e86eacdf2d029c77dca9a3881ff71f3b9f671f9c"} Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.670608 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="99fa522fd657a89abc7b4535e86eacdf2d029c77dca9a3881ff71f3b9f671f9c" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.670308 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.746359 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-5wwq4"] Dec 05 17:56:02 crc kubenswrapper[4961]: E1205 17:56:02.746875 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c42b2d0-4525-4847-a505-a625b88765b9" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.746897 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c42b2d0-4525-4847-a505-a625b88765b9" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.747092 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c42b2d0-4525-4847-a505-a625b88765b9" containerName="repo-setup-edpm-deployment-openstack-edpm-ipam" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.747928 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5wwq4" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.757351 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.757664 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.757879 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.758372 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rhbf4" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.760429 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-5wwq4"] Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.782097 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5wwq4\" (UID: \"c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5wwq4" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.782237 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5wwq4\" (UID: \"c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5wwq4" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.782317 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kf8j2\" (UniqueName: \"kubernetes.io/projected/c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3-kube-api-access-kf8j2\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5wwq4\" (UID: \"c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5wwq4" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.884437 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5wwq4\" (UID: \"c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5wwq4" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.884585 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kf8j2\" (UniqueName: \"kubernetes.io/projected/c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3-kube-api-access-kf8j2\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5wwq4\" (UID: \"c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5wwq4" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.884688 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5wwq4\" (UID: \"c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3\") " 
pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5wwq4" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.897009 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3-ssh-key\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5wwq4\" (UID: \"c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5wwq4" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.898479 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3-inventory\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5wwq4\" (UID: \"c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5wwq4" Dec 05 17:56:02 crc kubenswrapper[4961]: I1205 17:56:02.902024 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kf8j2\" (UniqueName: \"kubernetes.io/projected/c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3-kube-api-access-kf8j2\") pod \"redhat-edpm-deployment-openstack-edpm-ipam-5wwq4\" (UID: \"c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3\") " pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5wwq4" Dec 05 17:56:03 crc kubenswrapper[4961]: I1205 17:56:03.066697 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5wwq4" Dec 05 17:56:03 crc kubenswrapper[4961]: I1205 17:56:03.363487 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/redhat-edpm-deployment-openstack-edpm-ipam-5wwq4"] Dec 05 17:56:03 crc kubenswrapper[4961]: I1205 17:56:03.684003 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5wwq4" event={"ID":"c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3","Type":"ContainerStarted","Data":"60e05f290ebdda12e09014c8def80a58935260843f0201743c89c9a44bba4ac8"} Dec 05 17:56:04 crc kubenswrapper[4961]: I1205 17:56:04.696545 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5wwq4" event={"ID":"c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3","Type":"ContainerStarted","Data":"3e239c9bd288964500fcd7d9e1e98634a4ee2bec4c2da3e50a41a849439e5caf"} Dec 05 17:56:04 crc kubenswrapper[4961]: I1205 17:56:04.726435 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5wwq4" podStartSLOduration=2.307455866 podStartE2EDuration="2.726405601s" podCreationTimestamp="2025-12-05 17:56:02 +0000 UTC" firstStartedPulling="2025-12-05 17:56:03.37203818 +0000 UTC m=+1369.433188653" lastFinishedPulling="2025-12-05 17:56:03.790987895 +0000 UTC m=+1369.852138388" observedRunningTime="2025-12-05 17:56:04.717250044 +0000 UTC m=+1370.778400557" watchObservedRunningTime="2025-12-05 17:56:04.726405601 +0000 UTC m=+1370.787556084" Dec 05 17:56:06 crc kubenswrapper[4961]: I1205 17:56:06.716092 4961 generic.go:334] "Generic (PLEG): container finished" podID="c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3" containerID="3e239c9bd288964500fcd7d9e1e98634a4ee2bec4c2da3e50a41a849439e5caf" exitCode=0 Dec 05 17:56:06 crc kubenswrapper[4961]: I1205 17:56:06.716173 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5wwq4" 
event={"ID":"c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3","Type":"ContainerDied","Data":"3e239c9bd288964500fcd7d9e1e98634a4ee2bec4c2da3e50a41a849439e5caf"} Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.182030 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5wwq4" Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.311306 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3-ssh-key\") pod \"c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3\" (UID: \"c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3\") " Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.311826 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kf8j2\" (UniqueName: \"kubernetes.io/projected/c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3-kube-api-access-kf8j2\") pod \"c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3\" (UID: \"c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3\") " Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.312045 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3-inventory\") pod \"c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3\" (UID: \"c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3\") " Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.317418 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3-kube-api-access-kf8j2" (OuterVolumeSpecName: "kube-api-access-kf8j2") pod "c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3" (UID: "c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3"). InnerVolumeSpecName "kube-api-access-kf8j2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.349947 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3-inventory" (OuterVolumeSpecName: "inventory") pod "c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3" (UID: "c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.362847 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3" (UID: "c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.415620 4961 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.415673 4961 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.415692 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kf8j2\" (UniqueName: \"kubernetes.io/projected/c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3-kube-api-access-kf8j2\") on node \"crc\" DevicePath \"\"" Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.740680 4961 generic.go:334] "Generic (PLEG): container finished" podID="c21abdd4-f06b-4865-8880-0603525e1cb1" containerID="3020da6f7d982c36d2173fc9ce0fd1b1823bf8cfb437ee22a96db13835e8265b" exitCode=0 Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.740861 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c21abdd4-f06b-4865-8880-0603525e1cb1","Type":"ContainerDied","Data":"3020da6f7d982c36d2173fc9ce0fd1b1823bf8cfb437ee22a96db13835e8265b"} Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.744708 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5wwq4" event={"ID":"c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3","Type":"ContainerDied","Data":"60e05f290ebdda12e09014c8def80a58935260843f0201743c89c9a44bba4ac8"} Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.744759 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="60e05f290ebdda12e09014c8def80a58935260843f0201743c89c9a44bba4ac8" Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.744904 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/redhat-edpm-deployment-openstack-edpm-ipam-5wwq4" Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.835055 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-84hml"] Dec 05 17:56:08 crc kubenswrapper[4961]: E1205 17:56:08.835645 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.835673 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.835990 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3" containerName="redhat-edpm-deployment-openstack-edpm-ipam" Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.836968 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-84hml" Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.839630 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.839702 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.839982 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.840935 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rhbf4" Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.847277 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-84hml"] Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.926600 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b9050523-5c05-47cd-9e51-85703488427f-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-84hml\" (UID: \"b9050523-5c05-47cd-9e51-85703488427f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-84hml" Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.926695 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dtfl7\" (UniqueName: \"kubernetes.io/projected/b9050523-5c05-47cd-9e51-85703488427f-kube-api-access-dtfl7\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-84hml\" (UID: \"b9050523-5c05-47cd-9e51-85703488427f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-84hml" Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.926750 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9050523-5c05-47cd-9e51-85703488427f-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-84hml\" (UID: \"b9050523-5c05-47cd-9e51-85703488427f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-84hml" Dec 05 17:56:08 crc kubenswrapper[4961]: I1205 17:56:08.926936 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b9050523-5c05-47cd-9e51-85703488427f-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-84hml\" (UID: \"b9050523-5c05-47cd-9e51-85703488427f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-84hml" Dec 05 17:56:09 crc kubenswrapper[4961]: I1205 17:56:09.029559 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b9050523-5c05-47cd-9e51-85703488427f-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-84hml\" (UID: \"b9050523-5c05-47cd-9e51-85703488427f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-84hml" Dec 05 17:56:09 crc kubenswrapper[4961]: I1205 17:56:09.029662 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b9050523-5c05-47cd-9e51-85703488427f-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-84hml\" (UID: 
\"b9050523-5c05-47cd-9e51-85703488427f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-84hml" Dec 05 17:56:09 crc kubenswrapper[4961]: I1205 17:56:09.029709 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dtfl7\" (UniqueName: \"kubernetes.io/projected/b9050523-5c05-47cd-9e51-85703488427f-kube-api-access-dtfl7\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-84hml\" (UID: \"b9050523-5c05-47cd-9e51-85703488427f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-84hml" Dec 05 17:56:09 crc kubenswrapper[4961]: I1205 17:56:09.029753 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9050523-5c05-47cd-9e51-85703488427f-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-84hml\" (UID: \"b9050523-5c05-47cd-9e51-85703488427f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-84hml" Dec 05 17:56:09 crc kubenswrapper[4961]: I1205 17:56:09.035111 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9050523-5c05-47cd-9e51-85703488427f-bootstrap-combined-ca-bundle\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-84hml\" (UID: \"b9050523-5c05-47cd-9e51-85703488427f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-84hml" Dec 05 17:56:09 crc kubenswrapper[4961]: I1205 17:56:09.036187 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b9050523-5c05-47cd-9e51-85703488427f-inventory\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-84hml\" (UID: \"b9050523-5c05-47cd-9e51-85703488427f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-84hml" Dec 05 17:56:09 crc kubenswrapper[4961]: I1205 17:56:09.043114 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b9050523-5c05-47cd-9e51-85703488427f-ssh-key\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-84hml\" (UID: \"b9050523-5c05-47cd-9e51-85703488427f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-84hml" Dec 05 17:56:09 crc kubenswrapper[4961]: I1205 17:56:09.046138 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dtfl7\" (UniqueName: \"kubernetes.io/projected/b9050523-5c05-47cd-9e51-85703488427f-kube-api-access-dtfl7\") pod \"bootstrap-edpm-deployment-openstack-edpm-ipam-84hml\" (UID: \"b9050523-5c05-47cd-9e51-85703488427f\") " pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-84hml" Dec 05 17:56:09 crc kubenswrapper[4961]: I1205 17:56:09.156493 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-84hml" Dec 05 17:56:09 crc kubenswrapper[4961]: I1205 17:56:09.657982 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-84hml"] Dec 05 17:56:09 crc kubenswrapper[4961]: I1205 17:56:09.756958 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/rabbitmq-server-0" event={"ID":"c21abdd4-f06b-4865-8880-0603525e1cb1","Type":"ContainerStarted","Data":"f76c6e30c8139a780893b5b11cc09a242afc82935fc84095b3e5aa64eaa9812b"} Dec 05 17:56:09 crc kubenswrapper[4961]: I1205 17:56:09.757171 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack/rabbitmq-server-0" Dec 05 17:56:09 crc kubenswrapper[4961]: I1205 17:56:09.758498 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-84hml" event={"ID":"b9050523-5c05-47cd-9e51-85703488427f","Type":"ContainerStarted","Data":"7741a079198fa7ecf2e6d41bb6f47dde9e55d5d45b8e945288570c505953bd39"} Dec 05 17:56:09 crc kubenswrapper[4961]: I1205 17:56:09.779610 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/rabbitmq-server-0" podStartSLOduration=36.779586285 podStartE2EDuration="36.779586285s" podCreationTimestamp="2025-12-05 17:55:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 17:56:09.777356359 +0000 UTC m=+1375.838506852" watchObservedRunningTime="2025-12-05 17:56:09.779586285 +0000 UTC m=+1375.840736758" Dec 05 17:56:10 crc kubenswrapper[4961]: I1205 17:56:10.770899 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-84hml" event={"ID":"b9050523-5c05-47cd-9e51-85703488427f","Type":"ContainerStarted","Data":"a94c4392237be263a734bc46dcbe6626f026c487f5b59430a6a474cd47c1a81e"} Dec 05 17:56:10 crc kubenswrapper[4961]: I1205 17:56:10.814539 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-84hml" podStartSLOduration=2.379759158 podStartE2EDuration="2.814509245s" podCreationTimestamp="2025-12-05 17:56:08 +0000 UTC" firstStartedPulling="2025-12-05 17:56:09.660526576 +0000 UTC m=+1375.721677049" lastFinishedPulling="2025-12-05 17:56:10.095276663 +0000 UTC m=+1376.156427136" observedRunningTime="2025-12-05 17:56:10.812502816 +0000 UTC m=+1376.873653289" watchObservedRunningTime="2025-12-05 17:56:10.814509245 +0000 UTC m=+1376.875659718" Dec 05 17:56:23 crc kubenswrapper[4961]: I1205 17:56:23.704074 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack/rabbitmq-server-0" Dec 05 17:56:27 crc kubenswrapper[4961]: I1205 17:56:27.245708 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:56:27 crc kubenswrapper[4961]: I1205 17:56:27.246265 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" 
Dec 05 17:56:27 crc kubenswrapper[4961]: I1205 17:56:27.246316 4961 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" Dec 05 17:56:27 crc kubenswrapper[4961]: I1205 17:56:27.247088 4961 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"511b0e3c93cece387d56804a522b75bb0ef062e17cb5f9c42dba313a290e70af"} pod="openshift-machine-config-operator/machine-config-daemon-4vc27" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 17:56:27 crc kubenswrapper[4961]: I1205 17:56:27.247135 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" containerID="cri-o://511b0e3c93cece387d56804a522b75bb0ef062e17cb5f9c42dba313a290e70af" gracePeriod=600 Dec 05 17:56:27 crc kubenswrapper[4961]: I1205 17:56:27.934636 4961 generic.go:334] "Generic (PLEG): container finished" podID="c048c267-061b-479b-9d63-b3aee093d9f6" containerID="511b0e3c93cece387d56804a522b75bb0ef062e17cb5f9c42dba313a290e70af" exitCode=0 Dec 05 17:56:27 crc kubenswrapper[4961]: I1205 17:56:27.934691 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerDied","Data":"511b0e3c93cece387d56804a522b75bb0ef062e17cb5f9c42dba313a290e70af"} Dec 05 17:56:27 crc kubenswrapper[4961]: I1205 17:56:27.935190 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerStarted","Data":"d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9"} Dec 05 17:56:27 crc kubenswrapper[4961]: I1205 17:56:27.935212 4961 scope.go:117] "RemoveContainer" containerID="ea0a333281fac885bbddc52760bdc853687db225a39907ca95a10b1226157c7e" Dec 05 17:56:42 crc kubenswrapper[4961]: I1205 17:56:42.299219 4961 scope.go:117] "RemoveContainer" containerID="0c46fd593cd74d7c79826f40150b74459107d2fa2b260352f72626e5aace9e13" Dec 05 17:57:42 crc kubenswrapper[4961]: I1205 17:57:42.366867 4961 scope.go:117] "RemoveContainer" containerID="04c35517b40d2cfc69e880c0d5a758e9902cf4718a8346412d5b30149498f7db" Dec 05 17:57:57 crc kubenswrapper[4961]: I1205 17:57:57.485732 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-52t5k"] Dec 05 17:57:57 crc kubenswrapper[4961]: I1205 17:57:57.489057 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-52t5k" Dec 05 17:57:57 crc kubenswrapper[4961]: I1205 17:57:57.493514 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-52t5k"] Dec 05 17:57:57 crc kubenswrapper[4961]: I1205 17:57:57.614507 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3b6600b-5933-44dd-b124-3f14ab0ed972-catalog-content\") pod \"redhat-marketplace-52t5k\" (UID: \"c3b6600b-5933-44dd-b124-3f14ab0ed972\") " pod="openshift-marketplace/redhat-marketplace-52t5k" Dec 05 17:57:57 crc kubenswrapper[4961]: I1205 17:57:57.614561 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3b6600b-5933-44dd-b124-3f14ab0ed972-utilities\") pod \"redhat-marketplace-52t5k\" (UID: \"c3b6600b-5933-44dd-b124-3f14ab0ed972\") " pod="openshift-marketplace/redhat-marketplace-52t5k" Dec 05 17:57:57 crc kubenswrapper[4961]: I1205 17:57:57.615200 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-68wvr\" (UniqueName: \"kubernetes.io/projected/c3b6600b-5933-44dd-b124-3f14ab0ed972-kube-api-access-68wvr\") pod \"redhat-marketplace-52t5k\" (UID: \"c3b6600b-5933-44dd-b124-3f14ab0ed972\") " pod="openshift-marketplace/redhat-marketplace-52t5k" Dec 05 17:57:57 crc kubenswrapper[4961]: I1205 17:57:57.718458 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3b6600b-5933-44dd-b124-3f14ab0ed972-catalog-content\") pod \"redhat-marketplace-52t5k\" (UID: \"c3b6600b-5933-44dd-b124-3f14ab0ed972\") " pod="openshift-marketplace/redhat-marketplace-52t5k" Dec 05 17:57:57 crc kubenswrapper[4961]: I1205 17:57:57.718821 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3b6600b-5933-44dd-b124-3f14ab0ed972-utilities\") pod \"redhat-marketplace-52t5k\" (UID: \"c3b6600b-5933-44dd-b124-3f14ab0ed972\") " pod="openshift-marketplace/redhat-marketplace-52t5k" Dec 05 17:57:57 crc kubenswrapper[4961]: I1205 17:57:57.718869 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-68wvr\" (UniqueName: \"kubernetes.io/projected/c3b6600b-5933-44dd-b124-3f14ab0ed972-kube-api-access-68wvr\") pod \"redhat-marketplace-52t5k\" (UID: \"c3b6600b-5933-44dd-b124-3f14ab0ed972\") " pod="openshift-marketplace/redhat-marketplace-52t5k" Dec 05 17:57:57 crc kubenswrapper[4961]: I1205 17:57:57.719008 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3b6600b-5933-44dd-b124-3f14ab0ed972-catalog-content\") pod \"redhat-marketplace-52t5k\" (UID: \"c3b6600b-5933-44dd-b124-3f14ab0ed972\") " pod="openshift-marketplace/redhat-marketplace-52t5k" Dec 05 17:57:57 crc kubenswrapper[4961]: I1205 17:57:57.719259 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3b6600b-5933-44dd-b124-3f14ab0ed972-utilities\") pod \"redhat-marketplace-52t5k\" (UID: \"c3b6600b-5933-44dd-b124-3f14ab0ed972\") " pod="openshift-marketplace/redhat-marketplace-52t5k" Dec 05 17:57:57 crc kubenswrapper[4961]: I1205 17:57:57.747139 4961 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-68wvr\" (UniqueName: \"kubernetes.io/projected/c3b6600b-5933-44dd-b124-3f14ab0ed972-kube-api-access-68wvr\") pod \"redhat-marketplace-52t5k\" (UID: \"c3b6600b-5933-44dd-b124-3f14ab0ed972\") " pod="openshift-marketplace/redhat-marketplace-52t5k" Dec 05 17:57:57 crc kubenswrapper[4961]: I1205 17:57:57.825762 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-52t5k" Dec 05 17:57:58 crc kubenswrapper[4961]: I1205 17:57:58.312696 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-52t5k"] Dec 05 17:57:58 crc kubenswrapper[4961]: I1205 17:57:58.887666 4961 generic.go:334] "Generic (PLEG): container finished" podID="c3b6600b-5933-44dd-b124-3f14ab0ed972" containerID="a2a6fb28a41ce35bb02cdc82f5a33a9dfc6bf3aaf4b7c87f38cde0a7a46b246f" exitCode=0 Dec 05 17:57:58 crc kubenswrapper[4961]: I1205 17:57:58.887752 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-52t5k" event={"ID":"c3b6600b-5933-44dd-b124-3f14ab0ed972","Type":"ContainerDied","Data":"a2a6fb28a41ce35bb02cdc82f5a33a9dfc6bf3aaf4b7c87f38cde0a7a46b246f"} Dec 05 17:57:58 crc kubenswrapper[4961]: I1205 17:57:58.888075 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-52t5k" event={"ID":"c3b6600b-5933-44dd-b124-3f14ab0ed972","Type":"ContainerStarted","Data":"096d21fd30225d2284b6e9c2012296920b0e79fe38308f03a233d21e06e6fc74"} Dec 05 17:57:59 crc kubenswrapper[4961]: I1205 17:57:59.899986 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-52t5k" event={"ID":"c3b6600b-5933-44dd-b124-3f14ab0ed972","Type":"ContainerStarted","Data":"6e54bc255696c3e83eb2c188bfa94c8d7f346dff8b312329652e64ab747707b4"} Dec 05 17:58:00 crc kubenswrapper[4961]: I1205 17:58:00.914501 4961 generic.go:334] "Generic (PLEG): container finished" podID="c3b6600b-5933-44dd-b124-3f14ab0ed972" containerID="6e54bc255696c3e83eb2c188bfa94c8d7f346dff8b312329652e64ab747707b4" exitCode=0 Dec 05 17:58:00 crc kubenswrapper[4961]: I1205 17:58:00.914558 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-52t5k" event={"ID":"c3b6600b-5933-44dd-b124-3f14ab0ed972","Type":"ContainerDied","Data":"6e54bc255696c3e83eb2c188bfa94c8d7f346dff8b312329652e64ab747707b4"} Dec 05 17:58:01 crc kubenswrapper[4961]: I1205 17:58:01.930160 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-52t5k" event={"ID":"c3b6600b-5933-44dd-b124-3f14ab0ed972","Type":"ContainerStarted","Data":"779e7c0a081cbcba4f6fca8c7c4bf34c9ef7417978df9cd76de56ddd0a6d6e7b"} Dec 05 17:58:01 crc kubenswrapper[4961]: I1205 17:58:01.955953 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-52t5k" podStartSLOduration=2.46485968 podStartE2EDuration="4.955934015s" podCreationTimestamp="2025-12-05 17:57:57 +0000 UTC" firstStartedPulling="2025-12-05 17:57:58.889489352 +0000 UTC m=+1484.950639835" lastFinishedPulling="2025-12-05 17:58:01.380563707 +0000 UTC m=+1487.441714170" observedRunningTime="2025-12-05 17:58:01.946860899 +0000 UTC m=+1488.008011392" watchObservedRunningTime="2025-12-05 17:58:01.955934015 +0000 UTC m=+1488.017084488" Dec 05 17:58:07 crc kubenswrapper[4961]: I1205 17:58:07.826308 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-marketplace-52t5k" Dec 05 17:58:07 crc kubenswrapper[4961]: I1205 17:58:07.828018 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-52t5k" Dec 05 17:58:07 crc kubenswrapper[4961]: I1205 17:58:07.884678 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-52t5k" Dec 05 17:58:08 crc kubenswrapper[4961]: I1205 17:58:08.034101 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-52t5k" Dec 05 17:58:08 crc kubenswrapper[4961]: I1205 17:58:08.124771 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-52t5k"] Dec 05 17:58:10 crc kubenswrapper[4961]: I1205 17:58:10.012633 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-52t5k" podUID="c3b6600b-5933-44dd-b124-3f14ab0ed972" containerName="registry-server" containerID="cri-o://779e7c0a081cbcba4f6fca8c7c4bf34c9ef7417978df9cd76de56ddd0a6d6e7b" gracePeriod=2 Dec 05 17:58:10 crc kubenswrapper[4961]: E1205 17:58:10.240156 4961 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc3b6600b_5933_44dd_b124_3f14ab0ed972.slice/crio-779e7c0a081cbcba4f6fca8c7c4bf34c9ef7417978df9cd76de56ddd0a6d6e7b.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc3b6600b_5933_44dd_b124_3f14ab0ed972.slice/crio-conmon-779e7c0a081cbcba4f6fca8c7c4bf34c9ef7417978df9cd76de56ddd0a6d6e7b.scope\": RecentStats: unable to find data in memory cache]" Dec 05 17:58:10 crc kubenswrapper[4961]: I1205 17:58:10.451123 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-52t5k" Dec 05 17:58:10 crc kubenswrapper[4961]: I1205 17:58:10.577908 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-68wvr\" (UniqueName: \"kubernetes.io/projected/c3b6600b-5933-44dd-b124-3f14ab0ed972-kube-api-access-68wvr\") pod \"c3b6600b-5933-44dd-b124-3f14ab0ed972\" (UID: \"c3b6600b-5933-44dd-b124-3f14ab0ed972\") " Dec 05 17:58:10 crc kubenswrapper[4961]: I1205 17:58:10.578067 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3b6600b-5933-44dd-b124-3f14ab0ed972-catalog-content\") pod \"c3b6600b-5933-44dd-b124-3f14ab0ed972\" (UID: \"c3b6600b-5933-44dd-b124-3f14ab0ed972\") " Dec 05 17:58:10 crc kubenswrapper[4961]: I1205 17:58:10.578089 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3b6600b-5933-44dd-b124-3f14ab0ed972-utilities\") pod \"c3b6600b-5933-44dd-b124-3f14ab0ed972\" (UID: \"c3b6600b-5933-44dd-b124-3f14ab0ed972\") " Dec 05 17:58:10 crc kubenswrapper[4961]: I1205 17:58:10.579080 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3b6600b-5933-44dd-b124-3f14ab0ed972-utilities" (OuterVolumeSpecName: "utilities") pod "c3b6600b-5933-44dd-b124-3f14ab0ed972" (UID: "c3b6600b-5933-44dd-b124-3f14ab0ed972"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:58:10 crc kubenswrapper[4961]: I1205 17:58:10.584013 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c3b6600b-5933-44dd-b124-3f14ab0ed972-kube-api-access-68wvr" (OuterVolumeSpecName: "kube-api-access-68wvr") pod "c3b6600b-5933-44dd-b124-3f14ab0ed972" (UID: "c3b6600b-5933-44dd-b124-3f14ab0ed972"). InnerVolumeSpecName "kube-api-access-68wvr". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:58:10 crc kubenswrapper[4961]: I1205 17:58:10.599754 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c3b6600b-5933-44dd-b124-3f14ab0ed972-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c3b6600b-5933-44dd-b124-3f14ab0ed972" (UID: "c3b6600b-5933-44dd-b124-3f14ab0ed972"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:58:10 crc kubenswrapper[4961]: I1205 17:58:10.680437 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c3b6600b-5933-44dd-b124-3f14ab0ed972-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:58:10 crc kubenswrapper[4961]: I1205 17:58:10.680481 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c3b6600b-5933-44dd-b124-3f14ab0ed972-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:58:10 crc kubenswrapper[4961]: I1205 17:58:10.680492 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-68wvr\" (UniqueName: \"kubernetes.io/projected/c3b6600b-5933-44dd-b124-3f14ab0ed972-kube-api-access-68wvr\") on node \"crc\" DevicePath \"\"" Dec 05 17:58:11 crc kubenswrapper[4961]: I1205 17:58:11.026544 4961 generic.go:334] "Generic (PLEG): container finished" podID="c3b6600b-5933-44dd-b124-3f14ab0ed972" containerID="779e7c0a081cbcba4f6fca8c7c4bf34c9ef7417978df9cd76de56ddd0a6d6e7b" exitCode=0 Dec 05 17:58:11 crc kubenswrapper[4961]: I1205 17:58:11.026587 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-52t5k" Dec 05 17:58:11 crc kubenswrapper[4961]: I1205 17:58:11.026599 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-52t5k" event={"ID":"c3b6600b-5933-44dd-b124-3f14ab0ed972","Type":"ContainerDied","Data":"779e7c0a081cbcba4f6fca8c7c4bf34c9ef7417978df9cd76de56ddd0a6d6e7b"} Dec 05 17:58:11 crc kubenswrapper[4961]: I1205 17:58:11.026639 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-52t5k" event={"ID":"c3b6600b-5933-44dd-b124-3f14ab0ed972","Type":"ContainerDied","Data":"096d21fd30225d2284b6e9c2012296920b0e79fe38308f03a233d21e06e6fc74"} Dec 05 17:58:11 crc kubenswrapper[4961]: I1205 17:58:11.026661 4961 scope.go:117] "RemoveContainer" containerID="779e7c0a081cbcba4f6fca8c7c4bf34c9ef7417978df9cd76de56ddd0a6d6e7b" Dec 05 17:58:11 crc kubenswrapper[4961]: I1205 17:58:11.055618 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-52t5k"] Dec 05 17:58:11 crc kubenswrapper[4961]: I1205 17:58:11.067083 4961 scope.go:117] "RemoveContainer" containerID="6e54bc255696c3e83eb2c188bfa94c8d7f346dff8b312329652e64ab747707b4" Dec 05 17:58:11 crc kubenswrapper[4961]: I1205 17:58:11.067437 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-52t5k"] Dec 05 17:58:11 crc kubenswrapper[4961]: I1205 17:58:11.085678 4961 scope.go:117] "RemoveContainer" containerID="a2a6fb28a41ce35bb02cdc82f5a33a9dfc6bf3aaf4b7c87f38cde0a7a46b246f" Dec 05 17:58:11 crc kubenswrapper[4961]: I1205 17:58:11.135175 4961 scope.go:117] "RemoveContainer" containerID="779e7c0a081cbcba4f6fca8c7c4bf34c9ef7417978df9cd76de56ddd0a6d6e7b" Dec 05 17:58:11 crc kubenswrapper[4961]: E1205 17:58:11.135637 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"779e7c0a081cbcba4f6fca8c7c4bf34c9ef7417978df9cd76de56ddd0a6d6e7b\": container with ID starting with 779e7c0a081cbcba4f6fca8c7c4bf34c9ef7417978df9cd76de56ddd0a6d6e7b not found: ID does not exist" containerID="779e7c0a081cbcba4f6fca8c7c4bf34c9ef7417978df9cd76de56ddd0a6d6e7b" Dec 05 17:58:11 crc kubenswrapper[4961]: I1205 17:58:11.135671 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"779e7c0a081cbcba4f6fca8c7c4bf34c9ef7417978df9cd76de56ddd0a6d6e7b"} err="failed to get container status \"779e7c0a081cbcba4f6fca8c7c4bf34c9ef7417978df9cd76de56ddd0a6d6e7b\": rpc error: code = NotFound desc = could not find container \"779e7c0a081cbcba4f6fca8c7c4bf34c9ef7417978df9cd76de56ddd0a6d6e7b\": container with ID starting with 779e7c0a081cbcba4f6fca8c7c4bf34c9ef7417978df9cd76de56ddd0a6d6e7b not found: ID does not exist" Dec 05 17:58:11 crc kubenswrapper[4961]: I1205 17:58:11.135694 4961 scope.go:117] "RemoveContainer" containerID="6e54bc255696c3e83eb2c188bfa94c8d7f346dff8b312329652e64ab747707b4" Dec 05 17:58:11 crc kubenswrapper[4961]: E1205 17:58:11.136097 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6e54bc255696c3e83eb2c188bfa94c8d7f346dff8b312329652e64ab747707b4\": container with ID starting with 6e54bc255696c3e83eb2c188bfa94c8d7f346dff8b312329652e64ab747707b4 not found: ID does not exist" containerID="6e54bc255696c3e83eb2c188bfa94c8d7f346dff8b312329652e64ab747707b4" Dec 05 17:58:11 crc kubenswrapper[4961]: I1205 17:58:11.136127 4961 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6e54bc255696c3e83eb2c188bfa94c8d7f346dff8b312329652e64ab747707b4"} err="failed to get container status \"6e54bc255696c3e83eb2c188bfa94c8d7f346dff8b312329652e64ab747707b4\": rpc error: code = NotFound desc = could not find container \"6e54bc255696c3e83eb2c188bfa94c8d7f346dff8b312329652e64ab747707b4\": container with ID starting with 6e54bc255696c3e83eb2c188bfa94c8d7f346dff8b312329652e64ab747707b4 not found: ID does not exist" Dec 05 17:58:11 crc kubenswrapper[4961]: I1205 17:58:11.136142 4961 scope.go:117] "RemoveContainer" containerID="a2a6fb28a41ce35bb02cdc82f5a33a9dfc6bf3aaf4b7c87f38cde0a7a46b246f" Dec 05 17:58:11 crc kubenswrapper[4961]: E1205 17:58:11.136730 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a2a6fb28a41ce35bb02cdc82f5a33a9dfc6bf3aaf4b7c87f38cde0a7a46b246f\": container with ID starting with a2a6fb28a41ce35bb02cdc82f5a33a9dfc6bf3aaf4b7c87f38cde0a7a46b246f not found: ID does not exist" containerID="a2a6fb28a41ce35bb02cdc82f5a33a9dfc6bf3aaf4b7c87f38cde0a7a46b246f" Dec 05 17:58:11 crc kubenswrapper[4961]: I1205 17:58:11.136785 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a2a6fb28a41ce35bb02cdc82f5a33a9dfc6bf3aaf4b7c87f38cde0a7a46b246f"} err="failed to get container status \"a2a6fb28a41ce35bb02cdc82f5a33a9dfc6bf3aaf4b7c87f38cde0a7a46b246f\": rpc error: code = NotFound desc = could not find container \"a2a6fb28a41ce35bb02cdc82f5a33a9dfc6bf3aaf4b7c87f38cde0a7a46b246f\": container with ID starting with a2a6fb28a41ce35bb02cdc82f5a33a9dfc6bf3aaf4b7c87f38cde0a7a46b246f not found: ID does not exist" Dec 05 17:58:12 crc kubenswrapper[4961]: I1205 17:58:12.874157 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3b6600b-5933-44dd-b124-3f14ab0ed972" path="/var/lib/kubelet/pods/c3b6600b-5933-44dd-b124-3f14ab0ed972/volumes" Dec 05 17:58:27 crc kubenswrapper[4961]: I1205 17:58:27.246663 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:58:27 crc kubenswrapper[4961]: I1205 17:58:27.247079 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:58:57 crc kubenswrapper[4961]: I1205 17:58:57.245484 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 17:58:57 crc kubenswrapper[4961]: I1205 17:58:57.246116 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 17:58:59 crc kubenswrapper[4961]: I1205 
17:58:59.012523 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-cnk5c"] Dec 05 17:58:59 crc kubenswrapper[4961]: E1205 17:58:59.013352 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3b6600b-5933-44dd-b124-3f14ab0ed972" containerName="extract-content" Dec 05 17:58:59 crc kubenswrapper[4961]: I1205 17:58:59.013371 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3b6600b-5933-44dd-b124-3f14ab0ed972" containerName="extract-content" Dec 05 17:58:59 crc kubenswrapper[4961]: E1205 17:58:59.013390 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3b6600b-5933-44dd-b124-3f14ab0ed972" containerName="extract-utilities" Dec 05 17:58:59 crc kubenswrapper[4961]: I1205 17:58:59.013397 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3b6600b-5933-44dd-b124-3f14ab0ed972" containerName="extract-utilities" Dec 05 17:58:59 crc kubenswrapper[4961]: E1205 17:58:59.013417 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c3b6600b-5933-44dd-b124-3f14ab0ed972" containerName="registry-server" Dec 05 17:58:59 crc kubenswrapper[4961]: I1205 17:58:59.013422 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="c3b6600b-5933-44dd-b124-3f14ab0ed972" containerName="registry-server" Dec 05 17:58:59 crc kubenswrapper[4961]: I1205 17:58:59.013658 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="c3b6600b-5933-44dd-b124-3f14ab0ed972" containerName="registry-server" Dec 05 17:58:59 crc kubenswrapper[4961]: I1205 17:58:59.015313 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cnk5c" Dec 05 17:58:59 crc kubenswrapper[4961]: I1205 17:58:59.024443 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cnk5c"] Dec 05 17:58:59 crc kubenswrapper[4961]: I1205 17:58:59.045082 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60ec6e49-c28f-4d09-9ccb-f15d8637c506-catalog-content\") pod \"community-operators-cnk5c\" (UID: \"60ec6e49-c28f-4d09-9ccb-f15d8637c506\") " pod="openshift-marketplace/community-operators-cnk5c" Dec 05 17:58:59 crc kubenswrapper[4961]: I1205 17:58:59.045163 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sw5xp\" (UniqueName: \"kubernetes.io/projected/60ec6e49-c28f-4d09-9ccb-f15d8637c506-kube-api-access-sw5xp\") pod \"community-operators-cnk5c\" (UID: \"60ec6e49-c28f-4d09-9ccb-f15d8637c506\") " pod="openshift-marketplace/community-operators-cnk5c" Dec 05 17:58:59 crc kubenswrapper[4961]: I1205 17:58:59.045357 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60ec6e49-c28f-4d09-9ccb-f15d8637c506-utilities\") pod \"community-operators-cnk5c\" (UID: \"60ec6e49-c28f-4d09-9ccb-f15d8637c506\") " pod="openshift-marketplace/community-operators-cnk5c" Dec 05 17:58:59 crc kubenswrapper[4961]: I1205 17:58:59.148161 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60ec6e49-c28f-4d09-9ccb-f15d8637c506-catalog-content\") pod \"community-operators-cnk5c\" (UID: \"60ec6e49-c28f-4d09-9ccb-f15d8637c506\") " pod="openshift-marketplace/community-operators-cnk5c" Dec 05 17:58:59 crc 
kubenswrapper[4961]: I1205 17:58:59.148216 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sw5xp\" (UniqueName: \"kubernetes.io/projected/60ec6e49-c28f-4d09-9ccb-f15d8637c506-kube-api-access-sw5xp\") pod \"community-operators-cnk5c\" (UID: \"60ec6e49-c28f-4d09-9ccb-f15d8637c506\") " pod="openshift-marketplace/community-operators-cnk5c" Dec 05 17:58:59 crc kubenswrapper[4961]: I1205 17:58:59.148321 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60ec6e49-c28f-4d09-9ccb-f15d8637c506-utilities\") pod \"community-operators-cnk5c\" (UID: \"60ec6e49-c28f-4d09-9ccb-f15d8637c506\") " pod="openshift-marketplace/community-operators-cnk5c" Dec 05 17:58:59 crc kubenswrapper[4961]: I1205 17:58:59.148906 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60ec6e49-c28f-4d09-9ccb-f15d8637c506-utilities\") pod \"community-operators-cnk5c\" (UID: \"60ec6e49-c28f-4d09-9ccb-f15d8637c506\") " pod="openshift-marketplace/community-operators-cnk5c" Dec 05 17:58:59 crc kubenswrapper[4961]: I1205 17:58:59.149166 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60ec6e49-c28f-4d09-9ccb-f15d8637c506-catalog-content\") pod \"community-operators-cnk5c\" (UID: \"60ec6e49-c28f-4d09-9ccb-f15d8637c506\") " pod="openshift-marketplace/community-operators-cnk5c" Dec 05 17:58:59 crc kubenswrapper[4961]: I1205 17:58:59.170828 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sw5xp\" (UniqueName: \"kubernetes.io/projected/60ec6e49-c28f-4d09-9ccb-f15d8637c506-kube-api-access-sw5xp\") pod \"community-operators-cnk5c\" (UID: \"60ec6e49-c28f-4d09-9ccb-f15d8637c506\") " pod="openshift-marketplace/community-operators-cnk5c" Dec 05 17:58:59 crc kubenswrapper[4961]: I1205 17:58:59.335380 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cnk5c" Dec 05 17:58:59 crc kubenswrapper[4961]: I1205 17:58:59.825819 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cnk5c"] Dec 05 17:59:00 crc kubenswrapper[4961]: I1205 17:59:00.494974 4961 generic.go:334] "Generic (PLEG): container finished" podID="60ec6e49-c28f-4d09-9ccb-f15d8637c506" containerID="40e424c093ac2128fd6c5f604f7c60ff760a3d972aaab621f5a077956c55c6e7" exitCode=0 Dec 05 17:59:00 crc kubenswrapper[4961]: I1205 17:59:00.495018 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cnk5c" event={"ID":"60ec6e49-c28f-4d09-9ccb-f15d8637c506","Type":"ContainerDied","Data":"40e424c093ac2128fd6c5f604f7c60ff760a3d972aaab621f5a077956c55c6e7"} Dec 05 17:59:00 crc kubenswrapper[4961]: I1205 17:59:00.495317 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cnk5c" event={"ID":"60ec6e49-c28f-4d09-9ccb-f15d8637c506","Type":"ContainerStarted","Data":"f835801e20f590062c6ee64cdcb43196553eb10866730083c19ab71d0d70ba3d"} Dec 05 17:59:00 crc kubenswrapper[4961]: I1205 17:59:00.496823 4961 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 17:59:02 crc kubenswrapper[4961]: I1205 17:59:02.521968 4961 generic.go:334] "Generic (PLEG): container finished" podID="60ec6e49-c28f-4d09-9ccb-f15d8637c506" containerID="4b1530c67101f936772d2fd66e9e8042808439d4359ba94b8acba4f2ae5c69f1" exitCode=0 Dec 05 17:59:02 crc kubenswrapper[4961]: I1205 17:59:02.522020 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cnk5c" event={"ID":"60ec6e49-c28f-4d09-9ccb-f15d8637c506","Type":"ContainerDied","Data":"4b1530c67101f936772d2fd66e9e8042808439d4359ba94b8acba4f2ae5c69f1"} Dec 05 17:59:03 crc kubenswrapper[4961]: I1205 17:59:03.532801 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cnk5c" event={"ID":"60ec6e49-c28f-4d09-9ccb-f15d8637c506","Type":"ContainerStarted","Data":"26cee0ad6d5c4eab05398669a0c56247ac7c639045cc0cd9dd5afe1abe41d097"} Dec 05 17:59:03 crc kubenswrapper[4961]: I1205 17:59:03.558829 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-cnk5c" podStartSLOduration=3.155196283 podStartE2EDuration="5.558806895s" podCreationTimestamp="2025-12-05 17:58:58 +0000 UTC" firstStartedPulling="2025-12-05 17:59:00.496565176 +0000 UTC m=+1546.557715649" lastFinishedPulling="2025-12-05 17:59:02.900175788 +0000 UTC m=+1548.961326261" observedRunningTime="2025-12-05 17:59:03.552601122 +0000 UTC m=+1549.613751625" watchObservedRunningTime="2025-12-05 17:59:03.558806895 +0000 UTC m=+1549.619957378" Dec 05 17:59:09 crc kubenswrapper[4961]: I1205 17:59:09.336456 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-cnk5c" Dec 05 17:59:09 crc kubenswrapper[4961]: I1205 17:59:09.337062 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-cnk5c" Dec 05 17:59:09 crc kubenswrapper[4961]: I1205 17:59:09.381301 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-cnk5c" Dec 05 17:59:09 crc kubenswrapper[4961]: I1205 17:59:09.642441 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" 
status="ready" pod="openshift-marketplace/community-operators-cnk5c" Dec 05 17:59:09 crc kubenswrapper[4961]: I1205 17:59:09.688142 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cnk5c"] Dec 05 17:59:11 crc kubenswrapper[4961]: I1205 17:59:11.607196 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-cnk5c" podUID="60ec6e49-c28f-4d09-9ccb-f15d8637c506" containerName="registry-server" containerID="cri-o://26cee0ad6d5c4eab05398669a0c56247ac7c639045cc0cd9dd5afe1abe41d097" gracePeriod=2 Dec 05 17:59:11 crc kubenswrapper[4961]: E1205 17:59:11.834994 4961 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod60ec6e49_c28f_4d09_9ccb_f15d8637c506.slice/crio-26cee0ad6d5c4eab05398669a0c56247ac7c639045cc0cd9dd5afe1abe41d097.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod60ec6e49_c28f_4d09_9ccb_f15d8637c506.slice/crio-conmon-26cee0ad6d5c4eab05398669a0c56247ac7c639045cc0cd9dd5afe1abe41d097.scope\": RecentStats: unable to find data in memory cache]" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.032306 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-vtkwg"] Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.034970 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vtkwg" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.049697 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vtkwg"] Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.111352 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c049cf48-b580-4956-96fd-c9bac1884ddd-utilities\") pod \"redhat-operators-vtkwg\" (UID: \"c049cf48-b580-4956-96fd-c9bac1884ddd\") " pod="openshift-marketplace/redhat-operators-vtkwg" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.111577 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfpmx\" (UniqueName: \"kubernetes.io/projected/c049cf48-b580-4956-96fd-c9bac1884ddd-kube-api-access-zfpmx\") pod \"redhat-operators-vtkwg\" (UID: \"c049cf48-b580-4956-96fd-c9bac1884ddd\") " pod="openshift-marketplace/redhat-operators-vtkwg" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.111607 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c049cf48-b580-4956-96fd-c9bac1884ddd-catalog-content\") pod \"redhat-operators-vtkwg\" (UID: \"c049cf48-b580-4956-96fd-c9bac1884ddd\") " pod="openshift-marketplace/redhat-operators-vtkwg" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.149872 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cnk5c" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.212993 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sw5xp\" (UniqueName: \"kubernetes.io/projected/60ec6e49-c28f-4d09-9ccb-f15d8637c506-kube-api-access-sw5xp\") pod \"60ec6e49-c28f-4d09-9ccb-f15d8637c506\" (UID: \"60ec6e49-c28f-4d09-9ccb-f15d8637c506\") " Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.213130 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60ec6e49-c28f-4d09-9ccb-f15d8637c506-utilities\") pod \"60ec6e49-c28f-4d09-9ccb-f15d8637c506\" (UID: \"60ec6e49-c28f-4d09-9ccb-f15d8637c506\") " Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.213253 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60ec6e49-c28f-4d09-9ccb-f15d8637c506-catalog-content\") pod \"60ec6e49-c28f-4d09-9ccb-f15d8637c506\" (UID: \"60ec6e49-c28f-4d09-9ccb-f15d8637c506\") " Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.213679 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfpmx\" (UniqueName: \"kubernetes.io/projected/c049cf48-b580-4956-96fd-c9bac1884ddd-kube-api-access-zfpmx\") pod \"redhat-operators-vtkwg\" (UID: \"c049cf48-b580-4956-96fd-c9bac1884ddd\") " pod="openshift-marketplace/redhat-operators-vtkwg" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.213707 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c049cf48-b580-4956-96fd-c9bac1884ddd-catalog-content\") pod \"redhat-operators-vtkwg\" (UID: \"c049cf48-b580-4956-96fd-c9bac1884ddd\") " pod="openshift-marketplace/redhat-operators-vtkwg" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.213758 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c049cf48-b580-4956-96fd-c9bac1884ddd-utilities\") pod \"redhat-operators-vtkwg\" (UID: \"c049cf48-b580-4956-96fd-c9bac1884ddd\") " pod="openshift-marketplace/redhat-operators-vtkwg" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.214277 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c049cf48-b580-4956-96fd-c9bac1884ddd-utilities\") pod \"redhat-operators-vtkwg\" (UID: \"c049cf48-b580-4956-96fd-c9bac1884ddd\") " pod="openshift-marketplace/redhat-operators-vtkwg" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.214941 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60ec6e49-c28f-4d09-9ccb-f15d8637c506-utilities" (OuterVolumeSpecName: "utilities") pod "60ec6e49-c28f-4d09-9ccb-f15d8637c506" (UID: "60ec6e49-c28f-4d09-9ccb-f15d8637c506"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.215301 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c049cf48-b580-4956-96fd-c9bac1884ddd-catalog-content\") pod \"redhat-operators-vtkwg\" (UID: \"c049cf48-b580-4956-96fd-c9bac1884ddd\") " pod="openshift-marketplace/redhat-operators-vtkwg" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.221579 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/60ec6e49-c28f-4d09-9ccb-f15d8637c506-kube-api-access-sw5xp" (OuterVolumeSpecName: "kube-api-access-sw5xp") pod "60ec6e49-c28f-4d09-9ccb-f15d8637c506" (UID: "60ec6e49-c28f-4d09-9ccb-f15d8637c506"). InnerVolumeSpecName "kube-api-access-sw5xp". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.248550 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfpmx\" (UniqueName: \"kubernetes.io/projected/c049cf48-b580-4956-96fd-c9bac1884ddd-kube-api-access-zfpmx\") pod \"redhat-operators-vtkwg\" (UID: \"c049cf48-b580-4956-96fd-c9bac1884ddd\") " pod="openshift-marketplace/redhat-operators-vtkwg" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.275640 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/60ec6e49-c28f-4d09-9ccb-f15d8637c506-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "60ec6e49-c28f-4d09-9ccb-f15d8637c506" (UID: "60ec6e49-c28f-4d09-9ccb-f15d8637c506"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.315739 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/60ec6e49-c28f-4d09-9ccb-f15d8637c506-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.315805 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sw5xp\" (UniqueName: \"kubernetes.io/projected/60ec6e49-c28f-4d09-9ccb-f15d8637c506-kube-api-access-sw5xp\") on node \"crc\" DevicePath \"\"" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.315820 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/60ec6e49-c28f-4d09-9ccb-f15d8637c506-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.441215 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-vtkwg" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.651143 4961 generic.go:334] "Generic (PLEG): container finished" podID="60ec6e49-c28f-4d09-9ccb-f15d8637c506" containerID="26cee0ad6d5c4eab05398669a0c56247ac7c639045cc0cd9dd5afe1abe41d097" exitCode=0 Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.651190 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cnk5c" event={"ID":"60ec6e49-c28f-4d09-9ccb-f15d8637c506","Type":"ContainerDied","Data":"26cee0ad6d5c4eab05398669a0c56247ac7c639045cc0cd9dd5afe1abe41d097"} Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.651216 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cnk5c" event={"ID":"60ec6e49-c28f-4d09-9ccb-f15d8637c506","Type":"ContainerDied","Data":"f835801e20f590062c6ee64cdcb43196553eb10866730083c19ab71d0d70ba3d"} Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.651232 4961 scope.go:117] "RemoveContainer" containerID="26cee0ad6d5c4eab05398669a0c56247ac7c639045cc0cd9dd5afe1abe41d097" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.651369 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cnk5c" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.680877 4961 scope.go:117] "RemoveContainer" containerID="4b1530c67101f936772d2fd66e9e8042808439d4359ba94b8acba4f2ae5c69f1" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.695845 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cnk5c"] Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.708279 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-cnk5c"] Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.749719 4961 scope.go:117] "RemoveContainer" containerID="40e424c093ac2128fd6c5f604f7c60ff760a3d972aaab621f5a077956c55c6e7" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.804904 4961 scope.go:117] "RemoveContainer" containerID="26cee0ad6d5c4eab05398669a0c56247ac7c639045cc0cd9dd5afe1abe41d097" Dec 05 17:59:12 crc kubenswrapper[4961]: E1205 17:59:12.805821 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26cee0ad6d5c4eab05398669a0c56247ac7c639045cc0cd9dd5afe1abe41d097\": container with ID starting with 26cee0ad6d5c4eab05398669a0c56247ac7c639045cc0cd9dd5afe1abe41d097 not found: ID does not exist" containerID="26cee0ad6d5c4eab05398669a0c56247ac7c639045cc0cd9dd5afe1abe41d097" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.805854 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26cee0ad6d5c4eab05398669a0c56247ac7c639045cc0cd9dd5afe1abe41d097"} err="failed to get container status \"26cee0ad6d5c4eab05398669a0c56247ac7c639045cc0cd9dd5afe1abe41d097\": rpc error: code = NotFound desc = could not find container \"26cee0ad6d5c4eab05398669a0c56247ac7c639045cc0cd9dd5afe1abe41d097\": container with ID starting with 26cee0ad6d5c4eab05398669a0c56247ac7c639045cc0cd9dd5afe1abe41d097 not found: ID does not exist" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.805897 4961 scope.go:117] "RemoveContainer" containerID="4b1530c67101f936772d2fd66e9e8042808439d4359ba94b8acba4f2ae5c69f1" Dec 05 17:59:12 crc kubenswrapper[4961]: E1205 17:59:12.806192 4961 log.go:32] 
"ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4b1530c67101f936772d2fd66e9e8042808439d4359ba94b8acba4f2ae5c69f1\": container with ID starting with 4b1530c67101f936772d2fd66e9e8042808439d4359ba94b8acba4f2ae5c69f1 not found: ID does not exist" containerID="4b1530c67101f936772d2fd66e9e8042808439d4359ba94b8acba4f2ae5c69f1" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.806236 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4b1530c67101f936772d2fd66e9e8042808439d4359ba94b8acba4f2ae5c69f1"} err="failed to get container status \"4b1530c67101f936772d2fd66e9e8042808439d4359ba94b8acba4f2ae5c69f1\": rpc error: code = NotFound desc = could not find container \"4b1530c67101f936772d2fd66e9e8042808439d4359ba94b8acba4f2ae5c69f1\": container with ID starting with 4b1530c67101f936772d2fd66e9e8042808439d4359ba94b8acba4f2ae5c69f1 not found: ID does not exist" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.806262 4961 scope.go:117] "RemoveContainer" containerID="40e424c093ac2128fd6c5f604f7c60ff760a3d972aaab621f5a077956c55c6e7" Dec 05 17:59:12 crc kubenswrapper[4961]: E1205 17:59:12.807000 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"40e424c093ac2128fd6c5f604f7c60ff760a3d972aaab621f5a077956c55c6e7\": container with ID starting with 40e424c093ac2128fd6c5f604f7c60ff760a3d972aaab621f5a077956c55c6e7 not found: ID does not exist" containerID="40e424c093ac2128fd6c5f604f7c60ff760a3d972aaab621f5a077956c55c6e7" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.807042 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"40e424c093ac2128fd6c5f604f7c60ff760a3d972aaab621f5a077956c55c6e7"} err="failed to get container status \"40e424c093ac2128fd6c5f604f7c60ff760a3d972aaab621f5a077956c55c6e7\": rpc error: code = NotFound desc = could not find container \"40e424c093ac2128fd6c5f604f7c60ff760a3d972aaab621f5a077956c55c6e7\": container with ID starting with 40e424c093ac2128fd6c5f604f7c60ff760a3d972aaab621f5a077956c55c6e7 not found: ID does not exist" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.874652 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="60ec6e49-c28f-4d09-9ccb-f15d8637c506" path="/var/lib/kubelet/pods/60ec6e49-c28f-4d09-9ccb-f15d8637c506/volumes" Dec 05 17:59:12 crc kubenswrapper[4961]: I1205 17:59:12.919553 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-vtkwg"] Dec 05 17:59:13 crc kubenswrapper[4961]: I1205 17:59:13.660765 4961 generic.go:334] "Generic (PLEG): container finished" podID="c049cf48-b580-4956-96fd-c9bac1884ddd" containerID="34b36b428ef2e97d57a1d04c003d17931cabe7ecaa93c3a4df1ea657c04fb703" exitCode=0 Dec 05 17:59:13 crc kubenswrapper[4961]: I1205 17:59:13.660981 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vtkwg" event={"ID":"c049cf48-b580-4956-96fd-c9bac1884ddd","Type":"ContainerDied","Data":"34b36b428ef2e97d57a1d04c003d17931cabe7ecaa93c3a4df1ea657c04fb703"} Dec 05 17:59:13 crc kubenswrapper[4961]: I1205 17:59:13.661169 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vtkwg" event={"ID":"c049cf48-b580-4956-96fd-c9bac1884ddd","Type":"ContainerStarted","Data":"9a96499832f3b4f153de8343408b0001037e274cb13c386a8c35c9a990c1e846"} Dec 05 17:59:14 crc 
kubenswrapper[4961]: I1205 17:59:14.690712 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vtkwg" event={"ID":"c049cf48-b580-4956-96fd-c9bac1884ddd","Type":"ContainerStarted","Data":"707bcc5d3a48e8b5268c3ae517af6747d4fabb7bf852e73d802151c762ba2ff4"}
Dec 05 17:59:15 crc kubenswrapper[4961]: I1205 17:59:15.702128 4961 generic.go:334] "Generic (PLEG): container finished" podID="c049cf48-b580-4956-96fd-c9bac1884ddd" containerID="707bcc5d3a48e8b5268c3ae517af6747d4fabb7bf852e73d802151c762ba2ff4" exitCode=0
Dec 05 17:59:15 crc kubenswrapper[4961]: I1205 17:59:15.702188 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vtkwg" event={"ID":"c049cf48-b580-4956-96fd-c9bac1884ddd","Type":"ContainerDied","Data":"707bcc5d3a48e8b5268c3ae517af6747d4fabb7bf852e73d802151c762ba2ff4"}
Dec 05 17:59:16 crc kubenswrapper[4961]: I1205 17:59:16.714039 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vtkwg" event={"ID":"c049cf48-b580-4956-96fd-c9bac1884ddd","Type":"ContainerStarted","Data":"2bb931c76f209539663ec4e386e8348f532e5fb8f71b0afd514c4e58f2490f59"}
Dec 05 17:59:16 crc kubenswrapper[4961]: I1205 17:59:16.738894 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-vtkwg" podStartSLOduration=1.916095702 podStartE2EDuration="4.738871301s" podCreationTimestamp="2025-12-05 17:59:12 +0000 UTC" firstStartedPulling="2025-12-05 17:59:13.66371639 +0000 UTC m=+1559.724866863" lastFinishedPulling="2025-12-05 17:59:16.486491989 +0000 UTC m=+1562.547642462" observedRunningTime="2025-12-05 17:59:16.733404375 +0000 UTC m=+1562.794554888" watchObservedRunningTime="2025-12-05 17:59:16.738871301 +0000 UTC m=+1562.800021774"
Dec 05 17:59:19 crc kubenswrapper[4961]: I1205 17:59:19.226517 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vcms7"]
Dec 05 17:59:19 crc kubenswrapper[4961]: E1205 17:59:19.227200 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60ec6e49-c28f-4d09-9ccb-f15d8637c506" containerName="registry-server"
Dec 05 17:59:19 crc kubenswrapper[4961]: I1205 17:59:19.227212 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="60ec6e49-c28f-4d09-9ccb-f15d8637c506" containerName="registry-server"
Dec 05 17:59:19 crc kubenswrapper[4961]: E1205 17:59:19.227234 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60ec6e49-c28f-4d09-9ccb-f15d8637c506" containerName="extract-utilities"
Dec 05 17:59:19 crc kubenswrapper[4961]: I1205 17:59:19.227240 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="60ec6e49-c28f-4d09-9ccb-f15d8637c506" containerName="extract-utilities"
Dec 05 17:59:19 crc kubenswrapper[4961]: E1205 17:59:19.227254 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="60ec6e49-c28f-4d09-9ccb-f15d8637c506" containerName="extract-content"
Dec 05 17:59:19 crc kubenswrapper[4961]: I1205 17:59:19.227259 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="60ec6e49-c28f-4d09-9ccb-f15d8637c506" containerName="extract-content"
Dec 05 17:59:19 crc kubenswrapper[4961]: I1205 17:59:19.227453 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="60ec6e49-c28f-4d09-9ccb-f15d8637c506" containerName="registry-server"
Dec 05 17:59:19 crc kubenswrapper[4961]: I1205 17:59:19.232921 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vcms7"
Dec 05 17:59:19 crc kubenswrapper[4961]: I1205 17:59:19.241742 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vcms7"]
Dec 05 17:59:19 crc kubenswrapper[4961]: I1205 17:59:19.263323 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/182ce996-1788-47be-b510-d07e7687d62e-utilities\") pod \"certified-operators-vcms7\" (UID: \"182ce996-1788-47be-b510-d07e7687d62e\") " pod="openshift-marketplace/certified-operators-vcms7"
Dec 05 17:59:19 crc kubenswrapper[4961]: I1205 17:59:19.263367 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lfr4d\" (UniqueName: \"kubernetes.io/projected/182ce996-1788-47be-b510-d07e7687d62e-kube-api-access-lfr4d\") pod \"certified-operators-vcms7\" (UID: \"182ce996-1788-47be-b510-d07e7687d62e\") " pod="openshift-marketplace/certified-operators-vcms7"
Dec 05 17:59:19 crc kubenswrapper[4961]: I1205 17:59:19.263464 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/182ce996-1788-47be-b510-d07e7687d62e-catalog-content\") pod \"certified-operators-vcms7\" (UID: \"182ce996-1788-47be-b510-d07e7687d62e\") " pod="openshift-marketplace/certified-operators-vcms7"
Dec 05 17:59:19 crc kubenswrapper[4961]: I1205 17:59:19.365169 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/182ce996-1788-47be-b510-d07e7687d62e-utilities\") pod \"certified-operators-vcms7\" (UID: \"182ce996-1788-47be-b510-d07e7687d62e\") " pod="openshift-marketplace/certified-operators-vcms7"
Dec 05 17:59:19 crc kubenswrapper[4961]: I1205 17:59:19.365227 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lfr4d\" (UniqueName: \"kubernetes.io/projected/182ce996-1788-47be-b510-d07e7687d62e-kube-api-access-lfr4d\") pod \"certified-operators-vcms7\" (UID: \"182ce996-1788-47be-b510-d07e7687d62e\") " pod="openshift-marketplace/certified-operators-vcms7"
Dec 05 17:59:19 crc kubenswrapper[4961]: I1205 17:59:19.365309 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/182ce996-1788-47be-b510-d07e7687d62e-catalog-content\") pod \"certified-operators-vcms7\" (UID: \"182ce996-1788-47be-b510-d07e7687d62e\") " pod="openshift-marketplace/certified-operators-vcms7"
Dec 05 17:59:19 crc kubenswrapper[4961]: I1205 17:59:19.365891 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/182ce996-1788-47be-b510-d07e7687d62e-catalog-content\") pod \"certified-operators-vcms7\" (UID: \"182ce996-1788-47be-b510-d07e7687d62e\") " pod="openshift-marketplace/certified-operators-vcms7"
Dec 05 17:59:19 crc kubenswrapper[4961]: I1205 17:59:19.366103 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/182ce996-1788-47be-b510-d07e7687d62e-utilities\") pod \"certified-operators-vcms7\" (UID: \"182ce996-1788-47be-b510-d07e7687d62e\") " pod="openshift-marketplace/certified-operators-vcms7"
Dec 05 17:59:19 crc kubenswrapper[4961]: I1205 17:59:19.394340 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lfr4d\" (UniqueName: \"kubernetes.io/projected/182ce996-1788-47be-b510-d07e7687d62e-kube-api-access-lfr4d\") pod \"certified-operators-vcms7\" (UID: \"182ce996-1788-47be-b510-d07e7687d62e\") " pod="openshift-marketplace/certified-operators-vcms7"
Dec 05 17:59:19 crc kubenswrapper[4961]: I1205 17:59:19.557562 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vcms7"
Dec 05 17:59:20 crc kubenswrapper[4961]: I1205 17:59:20.135582 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vcms7"]
Dec 05 17:59:20 crc kubenswrapper[4961]: W1205 17:59:20.147843 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod182ce996_1788_47be_b510_d07e7687d62e.slice/crio-3401f2d67026f1dbe0b2d4c365d9a43e826e8585d33c2bb93869c5d607b78c7d WatchSource:0}: Error finding container 3401f2d67026f1dbe0b2d4c365d9a43e826e8585d33c2bb93869c5d607b78c7d: Status 404 returned error can't find the container with id 3401f2d67026f1dbe0b2d4c365d9a43e826e8585d33c2bb93869c5d607b78c7d
Dec 05 17:59:20 crc kubenswrapper[4961]: I1205 17:59:20.767563 4961 generic.go:334] "Generic (PLEG): container finished" podID="182ce996-1788-47be-b510-d07e7687d62e" containerID="3c33700b94332d7eb85f845cd3bf41efefabefbe0a47112eef9688349f266323" exitCode=0
Dec 05 17:59:20 crc kubenswrapper[4961]: I1205 17:59:20.767625 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vcms7" event={"ID":"182ce996-1788-47be-b510-d07e7687d62e","Type":"ContainerDied","Data":"3c33700b94332d7eb85f845cd3bf41efefabefbe0a47112eef9688349f266323"}
Dec 05 17:59:20 crc kubenswrapper[4961]: I1205 17:59:20.767661 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vcms7" event={"ID":"182ce996-1788-47be-b510-d07e7687d62e","Type":"ContainerStarted","Data":"3401f2d67026f1dbe0b2d4c365d9a43e826e8585d33c2bb93869c5d607b78c7d"}
Dec 05 17:59:22 crc kubenswrapper[4961]: I1205 17:59:22.441575 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-vtkwg"
Dec 05 17:59:22 crc kubenswrapper[4961]: I1205 17:59:22.442227 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-vtkwg"
Dec 05 17:59:22 crc kubenswrapper[4961]: I1205 17:59:22.496091 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-vtkwg"
Dec 05 17:59:22 crc kubenswrapper[4961]: I1205 17:59:22.792812 4961 generic.go:334] "Generic (PLEG): container finished" podID="b9050523-5c05-47cd-9e51-85703488427f" containerID="a94c4392237be263a734bc46dcbe6626f026c487f5b59430a6a474cd47c1a81e" exitCode=0
Dec 05 17:59:22 crc kubenswrapper[4961]: I1205 17:59:22.792882 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-84hml" event={"ID":"b9050523-5c05-47cd-9e51-85703488427f","Type":"ContainerDied","Data":"a94c4392237be263a734bc46dcbe6626f026c487f5b59430a6a474cd47c1a81e"}
Dec 05 17:59:22 crc kubenswrapper[4961]: I1205 17:59:22.795634 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vcms7" event={"ID":"182ce996-1788-47be-b510-d07e7687d62e","Type":"ContainerStarted","Data":"5b5cc11827c7f3334e64654e832ea2373febfc7ceb218d2e2a2144dde6492884"}
Dec 05 17:59:22 crc kubenswrapper[4961]: I1205 17:59:22.850118 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-vtkwg"
Dec 05 17:59:23 crc kubenswrapper[4961]: I1205 17:59:23.810979 4961 generic.go:334] "Generic (PLEG): container finished" podID="182ce996-1788-47be-b510-d07e7687d62e" containerID="5b5cc11827c7f3334e64654e832ea2373febfc7ceb218d2e2a2144dde6492884" exitCode=0
Dec 05 17:59:23 crc kubenswrapper[4961]: I1205 17:59:23.811031 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vcms7" event={"ID":"182ce996-1788-47be-b510-d07e7687d62e","Type":"ContainerDied","Data":"5b5cc11827c7f3334e64654e832ea2373febfc7ceb218d2e2a2144dde6492884"}
Dec 05 17:59:23 crc kubenswrapper[4961]: I1205 17:59:23.814764 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vtkwg"]
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.284302 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-84hml"
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.462301 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b9050523-5c05-47cd-9e51-85703488427f-inventory\") pod \"b9050523-5c05-47cd-9e51-85703488427f\" (UID: \"b9050523-5c05-47cd-9e51-85703488427f\") "
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.462465 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b9050523-5c05-47cd-9e51-85703488427f-ssh-key\") pod \"b9050523-5c05-47cd-9e51-85703488427f\" (UID: \"b9050523-5c05-47cd-9e51-85703488427f\") "
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.462605 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9050523-5c05-47cd-9e51-85703488427f-bootstrap-combined-ca-bundle\") pod \"b9050523-5c05-47cd-9e51-85703488427f\" (UID: \"b9050523-5c05-47cd-9e51-85703488427f\") "
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.462754 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dtfl7\" (UniqueName: \"kubernetes.io/projected/b9050523-5c05-47cd-9e51-85703488427f-kube-api-access-dtfl7\") pod \"b9050523-5c05-47cd-9e51-85703488427f\" (UID: \"b9050523-5c05-47cd-9e51-85703488427f\") "
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.468258 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9050523-5c05-47cd-9e51-85703488427f-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "b9050523-5c05-47cd-9e51-85703488427f" (UID: "b9050523-5c05-47cd-9e51-85703488427f"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.469834 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b9050523-5c05-47cd-9e51-85703488427f-kube-api-access-dtfl7" (OuterVolumeSpecName: "kube-api-access-dtfl7") pod "b9050523-5c05-47cd-9e51-85703488427f" (UID: "b9050523-5c05-47cd-9e51-85703488427f"). InnerVolumeSpecName "kube-api-access-dtfl7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.504589 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9050523-5c05-47cd-9e51-85703488427f-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b9050523-5c05-47cd-9e51-85703488427f" (UID: "b9050523-5c05-47cd-9e51-85703488427f"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.509535 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b9050523-5c05-47cd-9e51-85703488427f-inventory" (OuterVolumeSpecName: "inventory") pod "b9050523-5c05-47cd-9e51-85703488427f" (UID: "b9050523-5c05-47cd-9e51-85703488427f"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.565127 4961 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b9050523-5c05-47cd-9e51-85703488427f-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.565354 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dtfl7\" (UniqueName: \"kubernetes.io/projected/b9050523-5c05-47cd-9e51-85703488427f-kube-api-access-dtfl7\") on node \"crc\" DevicePath \"\""
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.565476 4961 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b9050523-5c05-47cd-9e51-85703488427f-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.565553 4961 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b9050523-5c05-47cd-9e51-85703488427f-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.848532 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-84hml" event={"ID":"b9050523-5c05-47cd-9e51-85703488427f","Type":"ContainerDied","Data":"7741a079198fa7ecf2e6d41bb6f47dde9e55d5d45b8e945288570c505953bd39"}
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.848576 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7741a079198fa7ecf2e6d41bb6f47dde9e55d5d45b8e945288570c505953bd39"
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.848576 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/bootstrap-edpm-deployment-openstack-edpm-ipam-84hml"
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.858490 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vcms7" event={"ID":"182ce996-1788-47be-b510-d07e7687d62e","Type":"ContainerStarted","Data":"49e6d3d70539c523f8d45e0eb3494137b1ec0924de7e9da94f76c79daf740092"}
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.858623 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-vtkwg" podUID="c049cf48-b580-4956-96fd-c9bac1884ddd" containerName="registry-server" containerID="cri-o://2bb931c76f209539663ec4e386e8348f532e5fb8f71b0afd514c4e58f2490f59" gracePeriod=2
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.890870 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vcms7" podStartSLOduration=2.3656870899999998 podStartE2EDuration="5.890849524s" podCreationTimestamp="2025-12-05 17:59:19 +0000 UTC" firstStartedPulling="2025-12-05 17:59:20.770340806 +0000 UTC m=+1566.831491289" lastFinishedPulling="2025-12-05 17:59:24.29550325 +0000 UTC m=+1570.356653723" observedRunningTime="2025-12-05 17:59:24.881705636 +0000 UTC m=+1570.942856129" watchObservedRunningTime="2025-12-05 17:59:24.890849524 +0000 UTC m=+1570.952000007"
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.914631 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7trn8"]
Dec 05 17:59:24 crc kubenswrapper[4961]: E1205 17:59:24.915308 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b9050523-5c05-47cd-9e51-85703488427f" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.915337 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="b9050523-5c05-47cd-9e51-85703488427f" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.915572 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="b9050523-5c05-47cd-9e51-85703488427f" containerName="bootstrap-edpm-deployment-openstack-edpm-ipam"
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.916364 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7trn8"
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.919298 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.919302 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rhbf4"
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.919874 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.923326 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 17:59:24 crc kubenswrapper[4961]: I1205 17:59:24.932002 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7trn8"]
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.073388 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c2f474d4-f96a-45cd-9432-b90f703a6b81-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7trn8\" (UID: \"c2f474d4-f96a-45cd-9432-b90f703a6b81\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7trn8"
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.074002 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c2f474d4-f96a-45cd-9432-b90f703a6b81-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7trn8\" (UID: \"c2f474d4-f96a-45cd-9432-b90f703a6b81\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7trn8"
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.074204 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfhfb\" (UniqueName: \"kubernetes.io/projected/c2f474d4-f96a-45cd-9432-b90f703a6b81-kube-api-access-pfhfb\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7trn8\" (UID: \"c2f474d4-f96a-45cd-9432-b90f703a6b81\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7trn8"
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.175403 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c2f474d4-f96a-45cd-9432-b90f703a6b81-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7trn8\" (UID: \"c2f474d4-f96a-45cd-9432-b90f703a6b81\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7trn8"
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.175445 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c2f474d4-f96a-45cd-9432-b90f703a6b81-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7trn8\" (UID: \"c2f474d4-f96a-45cd-9432-b90f703a6b81\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7trn8"
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.175518 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfhfb\" (UniqueName: \"kubernetes.io/projected/c2f474d4-f96a-45cd-9432-b90f703a6b81-kube-api-access-pfhfb\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7trn8\" (UID: \"c2f474d4-f96a-45cd-9432-b90f703a6b81\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7trn8"
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.182392 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c2f474d4-f96a-45cd-9432-b90f703a6b81-inventory\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7trn8\" (UID: \"c2f474d4-f96a-45cd-9432-b90f703a6b81\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7trn8"
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.182415 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c2f474d4-f96a-45cd-9432-b90f703a6b81-ssh-key\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7trn8\" (UID: \"c2f474d4-f96a-45cd-9432-b90f703a6b81\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7trn8"
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.193084 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfhfb\" (UniqueName: \"kubernetes.io/projected/c2f474d4-f96a-45cd-9432-b90f703a6b81-kube-api-access-pfhfb\") pod \"download-cache-edpm-deployment-openstack-edpm-ipam-7trn8\" (UID: \"c2f474d4-f96a-45cd-9432-b90f703a6b81\") " pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7trn8"
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.308534 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7trn8"
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.436816 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vtkwg"
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.582924 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c049cf48-b580-4956-96fd-c9bac1884ddd-catalog-content\") pod \"c049cf48-b580-4956-96fd-c9bac1884ddd\" (UID: \"c049cf48-b580-4956-96fd-c9bac1884ddd\") "
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.583117 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c049cf48-b580-4956-96fd-c9bac1884ddd-utilities\") pod \"c049cf48-b580-4956-96fd-c9bac1884ddd\" (UID: \"c049cf48-b580-4956-96fd-c9bac1884ddd\") "
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.583160 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfpmx\" (UniqueName: \"kubernetes.io/projected/c049cf48-b580-4956-96fd-c9bac1884ddd-kube-api-access-zfpmx\") pod \"c049cf48-b580-4956-96fd-c9bac1884ddd\" (UID: \"c049cf48-b580-4956-96fd-c9bac1884ddd\") "
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.584118 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c049cf48-b580-4956-96fd-c9bac1884ddd-utilities" (OuterVolumeSpecName: "utilities") pod "c049cf48-b580-4956-96fd-c9bac1884ddd" (UID: "c049cf48-b580-4956-96fd-c9bac1884ddd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.587708 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c049cf48-b580-4956-96fd-c9bac1884ddd-kube-api-access-zfpmx" (OuterVolumeSpecName: "kube-api-access-zfpmx") pod "c049cf48-b580-4956-96fd-c9bac1884ddd" (UID: "c049cf48-b580-4956-96fd-c9bac1884ddd"). InnerVolumeSpecName "kube-api-access-zfpmx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.686333 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c049cf48-b580-4956-96fd-c9bac1884ddd-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.686374 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfpmx\" (UniqueName: \"kubernetes.io/projected/c049cf48-b580-4956-96fd-c9bac1884ddd-kube-api-access-zfpmx\") on node \"crc\" DevicePath \"\""
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.699971 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c049cf48-b580-4956-96fd-c9bac1884ddd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c049cf48-b580-4956-96fd-c9bac1884ddd" (UID: "c049cf48-b580-4956-96fd-c9bac1884ddd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.787819 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c049cf48-b580-4956-96fd-c9bac1884ddd-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 17:59:25 crc kubenswrapper[4961]: W1205 17:59:25.855630 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc2f474d4_f96a_45cd_9432_b90f703a6b81.slice/crio-534250acf12712defd93469ba6942c17239d244cd3d40933d9b6942206bf05a6 WatchSource:0}: Error finding container 534250acf12712defd93469ba6942c17239d244cd3d40933d9b6942206bf05a6: Status 404 returned error can't find the container with id 534250acf12712defd93469ba6942c17239d244cd3d40933d9b6942206bf05a6
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.858940 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7trn8"]
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.875463 4961 generic.go:334] "Generic (PLEG): container finished" podID="c049cf48-b580-4956-96fd-c9bac1884ddd" containerID="2bb931c76f209539663ec4e386e8348f532e5fb8f71b0afd514c4e58f2490f59" exitCode=0
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.875531 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-vtkwg"
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.875553 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vtkwg" event={"ID":"c049cf48-b580-4956-96fd-c9bac1884ddd","Type":"ContainerDied","Data":"2bb931c76f209539663ec4e386e8348f532e5fb8f71b0afd514c4e58f2490f59"}
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.876130 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-vtkwg" event={"ID":"c049cf48-b580-4956-96fd-c9bac1884ddd","Type":"ContainerDied","Data":"9a96499832f3b4f153de8343408b0001037e274cb13c386a8c35c9a990c1e846"}
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.876156 4961 scope.go:117] "RemoveContainer" containerID="2bb931c76f209539663ec4e386e8348f532e5fb8f71b0afd514c4e58f2490f59"
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.877307 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7trn8" event={"ID":"c2f474d4-f96a-45cd-9432-b90f703a6b81","Type":"ContainerStarted","Data":"534250acf12712defd93469ba6942c17239d244cd3d40933d9b6942206bf05a6"}
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.897206 4961 scope.go:117] "RemoveContainer" containerID="707bcc5d3a48e8b5268c3ae517af6747d4fabb7bf852e73d802151c762ba2ff4"
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.940440 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-vtkwg"]
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.951495 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-vtkwg"]
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.956962 4961 scope.go:117] "RemoveContainer" containerID="34b36b428ef2e97d57a1d04c003d17931cabe7ecaa93c3a4df1ea657c04fb703"
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.988185 4961 scope.go:117] "RemoveContainer" containerID="2bb931c76f209539663ec4e386e8348f532e5fb8f71b0afd514c4e58f2490f59"
Dec 05 17:59:25 crc kubenswrapper[4961]: E1205 17:59:25.989407 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2bb931c76f209539663ec4e386e8348f532e5fb8f71b0afd514c4e58f2490f59\": container with ID starting with 2bb931c76f209539663ec4e386e8348f532e5fb8f71b0afd514c4e58f2490f59 not found: ID does not exist" containerID="2bb931c76f209539663ec4e386e8348f532e5fb8f71b0afd514c4e58f2490f59"
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.989449 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2bb931c76f209539663ec4e386e8348f532e5fb8f71b0afd514c4e58f2490f59"} err="failed to get container status \"2bb931c76f209539663ec4e386e8348f532e5fb8f71b0afd514c4e58f2490f59\": rpc error: code = NotFound desc = could not find container \"2bb931c76f209539663ec4e386e8348f532e5fb8f71b0afd514c4e58f2490f59\": container with ID starting with 2bb931c76f209539663ec4e386e8348f532e5fb8f71b0afd514c4e58f2490f59 not found: ID does not exist"
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.989478 4961 scope.go:117] "RemoveContainer" containerID="707bcc5d3a48e8b5268c3ae517af6747d4fabb7bf852e73d802151c762ba2ff4"
Dec 05 17:59:25 crc kubenswrapper[4961]: E1205 17:59:25.989792 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"707bcc5d3a48e8b5268c3ae517af6747d4fabb7bf852e73d802151c762ba2ff4\": container with ID starting with 707bcc5d3a48e8b5268c3ae517af6747d4fabb7bf852e73d802151c762ba2ff4 not found: ID does not exist" containerID="707bcc5d3a48e8b5268c3ae517af6747d4fabb7bf852e73d802151c762ba2ff4"
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.989824 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"707bcc5d3a48e8b5268c3ae517af6747d4fabb7bf852e73d802151c762ba2ff4"} err="failed to get container status \"707bcc5d3a48e8b5268c3ae517af6747d4fabb7bf852e73d802151c762ba2ff4\": rpc error: code = NotFound desc = could not find container \"707bcc5d3a48e8b5268c3ae517af6747d4fabb7bf852e73d802151c762ba2ff4\": container with ID starting with 707bcc5d3a48e8b5268c3ae517af6747d4fabb7bf852e73d802151c762ba2ff4 not found: ID does not exist"
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.989844 4961 scope.go:117] "RemoveContainer" containerID="34b36b428ef2e97d57a1d04c003d17931cabe7ecaa93c3a4df1ea657c04fb703"
Dec 05 17:59:25 crc kubenswrapper[4961]: E1205 17:59:25.993961 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"34b36b428ef2e97d57a1d04c003d17931cabe7ecaa93c3a4df1ea657c04fb703\": container with ID starting with 34b36b428ef2e97d57a1d04c003d17931cabe7ecaa93c3a4df1ea657c04fb703 not found: ID does not exist" containerID="34b36b428ef2e97d57a1d04c003d17931cabe7ecaa93c3a4df1ea657c04fb703"
Dec 05 17:59:25 crc kubenswrapper[4961]: I1205 17:59:25.994024 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"34b36b428ef2e97d57a1d04c003d17931cabe7ecaa93c3a4df1ea657c04fb703"} err="failed to get container status \"34b36b428ef2e97d57a1d04c003d17931cabe7ecaa93c3a4df1ea657c04fb703\": rpc error: code = NotFound desc = could not find container \"34b36b428ef2e97d57a1d04c003d17931cabe7ecaa93c3a4df1ea657c04fb703\": container with ID starting with 34b36b428ef2e97d57a1d04c003d17931cabe7ecaa93c3a4df1ea657c04fb703 not found: ID does not exist"
Dec 05 17:59:26 crc kubenswrapper[4961]: I1205 17:59:26.885939 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c049cf48-b580-4956-96fd-c9bac1884ddd" path="/var/lib/kubelet/pods/c049cf48-b580-4956-96fd-c9bac1884ddd/volumes"
Dec 05 17:59:26 crc kubenswrapper[4961]: I1205 17:59:26.893796 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7trn8" event={"ID":"c2f474d4-f96a-45cd-9432-b90f703a6b81","Type":"ContainerStarted","Data":"4d47630dfa5e14639b75b2e07af6b48057f68cc28b37faf5c274d82a83274cd7"}
Dec 05 17:59:26 crc kubenswrapper[4961]: I1205 17:59:26.922871 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7trn8" podStartSLOduration=2.457473367 podStartE2EDuration="2.922843721s" podCreationTimestamp="2025-12-05 17:59:24 +0000 UTC" firstStartedPulling="2025-12-05 17:59:25.8612625 +0000 UTC m=+1571.922412993" lastFinishedPulling="2025-12-05 17:59:26.326632874 +0000 UTC m=+1572.387783347" observedRunningTime="2025-12-05 17:59:26.917569849 +0000 UTC m=+1572.978720342" watchObservedRunningTime="2025-12-05 17:59:26.922843721 +0000 UTC m=+1572.983994204"
Dec 05 17:59:27 crc kubenswrapper[4961]: I1205 17:59:27.246408 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 17:59:27 crc kubenswrapper[4961]: I1205 17:59:27.246516 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 17:59:27 crc kubenswrapper[4961]: I1205 17:59:27.246597 4961 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vc27"
Dec 05 17:59:27 crc kubenswrapper[4961]: I1205 17:59:27.247976 4961 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9"} pod="openshift-machine-config-operator/machine-config-daemon-4vc27" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 17:59:27 crc kubenswrapper[4961]: I1205 17:59:27.248039 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" containerID="cri-o://d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9" gracePeriod=600
Dec 05 17:59:27 crc kubenswrapper[4961]: E1205 17:59:27.370385 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 17:59:27 crc kubenswrapper[4961]: I1205 17:59:27.907733 4961 generic.go:334] "Generic (PLEG): container finished" podID="c048c267-061b-479b-9d63-b3aee093d9f6" containerID="d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9" exitCode=0
Dec 05 17:59:27 crc kubenswrapper[4961]: I1205 17:59:27.907849 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerDied","Data":"d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9"}
Dec 05 17:59:27 crc kubenswrapper[4961]: I1205 17:59:27.908178 4961 scope.go:117] "RemoveContainer" containerID="511b0e3c93cece387d56804a522b75bb0ef062e17cb5f9c42dba313a290e70af"
Dec 05 17:59:27 crc kubenswrapper[4961]: I1205 17:59:27.909066 4961 scope.go:117] "RemoveContainer" containerID="d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9"
Dec 05 17:59:27 crc kubenswrapper[4961]: E1205 17:59:27.909400 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 17:59:29 crc kubenswrapper[4961]: I1205 17:59:29.558333 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vcms7" Dec 05 17:59:29 crc kubenswrapper[4961]: I1205 17:59:29.558706 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vcms7" Dec 05 17:59:29 crc kubenswrapper[4961]: I1205 17:59:29.606492 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vcms7" Dec 05 17:59:29 crc kubenswrapper[4961]: I1205 17:59:29.980286 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vcms7" Dec 05 17:59:31 crc kubenswrapper[4961]: I1205 17:59:31.630437 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vcms7"] Dec 05 17:59:31 crc kubenswrapper[4961]: I1205 17:59:31.947907 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vcms7" podUID="182ce996-1788-47be-b510-d07e7687d62e" containerName="registry-server" containerID="cri-o://49e6d3d70539c523f8d45e0eb3494137b1ec0924de7e9da94f76c79daf740092" gracePeriod=2 Dec 05 17:59:32 crc kubenswrapper[4961]: I1205 17:59:32.487218 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vcms7" Dec 05 17:59:32 crc kubenswrapper[4961]: I1205 17:59:32.631353 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/182ce996-1788-47be-b510-d07e7687d62e-catalog-content\") pod \"182ce996-1788-47be-b510-d07e7687d62e\" (UID: \"182ce996-1788-47be-b510-d07e7687d62e\") " Dec 05 17:59:32 crc kubenswrapper[4961]: I1205 17:59:32.631461 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lfr4d\" (UniqueName: \"kubernetes.io/projected/182ce996-1788-47be-b510-d07e7687d62e-kube-api-access-lfr4d\") pod \"182ce996-1788-47be-b510-d07e7687d62e\" (UID: \"182ce996-1788-47be-b510-d07e7687d62e\") " Dec 05 17:59:32 crc kubenswrapper[4961]: I1205 17:59:32.631604 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/182ce996-1788-47be-b510-d07e7687d62e-utilities\") pod \"182ce996-1788-47be-b510-d07e7687d62e\" (UID: \"182ce996-1788-47be-b510-d07e7687d62e\") " Dec 05 17:59:32 crc kubenswrapper[4961]: I1205 17:59:32.632520 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/182ce996-1788-47be-b510-d07e7687d62e-utilities" (OuterVolumeSpecName: "utilities") pod "182ce996-1788-47be-b510-d07e7687d62e" (UID: "182ce996-1788-47be-b510-d07e7687d62e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:59:32 crc kubenswrapper[4961]: I1205 17:59:32.651936 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/182ce996-1788-47be-b510-d07e7687d62e-kube-api-access-lfr4d" (OuterVolumeSpecName: "kube-api-access-lfr4d") pod "182ce996-1788-47be-b510-d07e7687d62e" (UID: "182ce996-1788-47be-b510-d07e7687d62e"). InnerVolumeSpecName "kube-api-access-lfr4d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 17:59:32 crc kubenswrapper[4961]: I1205 17:59:32.687916 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/182ce996-1788-47be-b510-d07e7687d62e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "182ce996-1788-47be-b510-d07e7687d62e" (UID: "182ce996-1788-47be-b510-d07e7687d62e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 17:59:32 crc kubenswrapper[4961]: I1205 17:59:32.734357 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/182ce996-1788-47be-b510-d07e7687d62e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 17:59:32 crc kubenswrapper[4961]: I1205 17:59:32.734408 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lfr4d\" (UniqueName: \"kubernetes.io/projected/182ce996-1788-47be-b510-d07e7687d62e-kube-api-access-lfr4d\") on node \"crc\" DevicePath \"\"" Dec 05 17:59:32 crc kubenswrapper[4961]: I1205 17:59:32.734424 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/182ce996-1788-47be-b510-d07e7687d62e-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 17:59:32 crc kubenswrapper[4961]: I1205 17:59:32.963974 4961 generic.go:334] "Generic (PLEG): container finished" podID="182ce996-1788-47be-b510-d07e7687d62e" containerID="49e6d3d70539c523f8d45e0eb3494137b1ec0924de7e9da94f76c79daf740092" exitCode=0 Dec 05 17:59:32 crc kubenswrapper[4961]: I1205 17:59:32.964026 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vcms7" event={"ID":"182ce996-1788-47be-b510-d07e7687d62e","Type":"ContainerDied","Data":"49e6d3d70539c523f8d45e0eb3494137b1ec0924de7e9da94f76c79daf740092"} Dec 05 17:59:32 crc kubenswrapper[4961]: I1205 17:59:32.964059 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vcms7" event={"ID":"182ce996-1788-47be-b510-d07e7687d62e","Type":"ContainerDied","Data":"3401f2d67026f1dbe0b2d4c365d9a43e826e8585d33c2bb93869c5d607b78c7d"} Dec 05 17:59:32 crc kubenswrapper[4961]: I1205 17:59:32.964081 4961 scope.go:117] "RemoveContainer" containerID="49e6d3d70539c523f8d45e0eb3494137b1ec0924de7e9da94f76c79daf740092" Dec 05 17:59:32 crc kubenswrapper[4961]: I1205 17:59:32.964087 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-vcms7" Dec 05 17:59:32 crc kubenswrapper[4961]: I1205 17:59:32.995316 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vcms7"] Dec 05 17:59:33 crc kubenswrapper[4961]: I1205 17:59:33.004433 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vcms7"] Dec 05 17:59:33 crc kubenswrapper[4961]: I1205 17:59:33.005322 4961 scope.go:117] "RemoveContainer" containerID="5b5cc11827c7f3334e64654e832ea2373febfc7ceb218d2e2a2144dde6492884" Dec 05 17:59:33 crc kubenswrapper[4961]: I1205 17:59:33.029985 4961 scope.go:117] "RemoveContainer" containerID="3c33700b94332d7eb85f845cd3bf41efefabefbe0a47112eef9688349f266323" Dec 05 17:59:33 crc kubenswrapper[4961]: I1205 17:59:33.118230 4961 scope.go:117] "RemoveContainer" containerID="49e6d3d70539c523f8d45e0eb3494137b1ec0924de7e9da94f76c79daf740092" Dec 05 17:59:33 crc kubenswrapper[4961]: E1205 17:59:33.118932 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49e6d3d70539c523f8d45e0eb3494137b1ec0924de7e9da94f76c79daf740092\": container with ID starting with 49e6d3d70539c523f8d45e0eb3494137b1ec0924de7e9da94f76c79daf740092 not found: ID does not exist" containerID="49e6d3d70539c523f8d45e0eb3494137b1ec0924de7e9da94f76c79daf740092" Dec 05 17:59:33 crc kubenswrapper[4961]: I1205 17:59:33.118978 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49e6d3d70539c523f8d45e0eb3494137b1ec0924de7e9da94f76c79daf740092"} err="failed to get container status \"49e6d3d70539c523f8d45e0eb3494137b1ec0924de7e9da94f76c79daf740092\": rpc error: code = NotFound desc = could not find container \"49e6d3d70539c523f8d45e0eb3494137b1ec0924de7e9da94f76c79daf740092\": container with ID starting with 49e6d3d70539c523f8d45e0eb3494137b1ec0924de7e9da94f76c79daf740092 not found: ID does not exist" Dec 05 17:59:33 crc kubenswrapper[4961]: I1205 17:59:33.119012 4961 scope.go:117] "RemoveContainer" containerID="5b5cc11827c7f3334e64654e832ea2373febfc7ceb218d2e2a2144dde6492884" Dec 05 17:59:33 crc kubenswrapper[4961]: E1205 17:59:33.119564 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5b5cc11827c7f3334e64654e832ea2373febfc7ceb218d2e2a2144dde6492884\": container with ID starting with 5b5cc11827c7f3334e64654e832ea2373febfc7ceb218d2e2a2144dde6492884 not found: ID does not exist" containerID="5b5cc11827c7f3334e64654e832ea2373febfc7ceb218d2e2a2144dde6492884" Dec 05 17:59:33 crc kubenswrapper[4961]: I1205 17:59:33.119626 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5b5cc11827c7f3334e64654e832ea2373febfc7ceb218d2e2a2144dde6492884"} err="failed to get container status \"5b5cc11827c7f3334e64654e832ea2373febfc7ceb218d2e2a2144dde6492884\": rpc error: code = NotFound desc = could not find container \"5b5cc11827c7f3334e64654e832ea2373febfc7ceb218d2e2a2144dde6492884\": container with ID starting with 5b5cc11827c7f3334e64654e832ea2373febfc7ceb218d2e2a2144dde6492884 not found: ID does not exist" Dec 05 17:59:33 crc kubenswrapper[4961]: I1205 17:59:33.119677 4961 scope.go:117] "RemoveContainer" containerID="3c33700b94332d7eb85f845cd3bf41efefabefbe0a47112eef9688349f266323" Dec 05 17:59:33 crc kubenswrapper[4961]: E1205 17:59:33.120138 4961 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"3c33700b94332d7eb85f845cd3bf41efefabefbe0a47112eef9688349f266323\": container with ID starting with 3c33700b94332d7eb85f845cd3bf41efefabefbe0a47112eef9688349f266323 not found: ID does not exist" containerID="3c33700b94332d7eb85f845cd3bf41efefabefbe0a47112eef9688349f266323" Dec 05 17:59:33 crc kubenswrapper[4961]: I1205 17:59:33.120184 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3c33700b94332d7eb85f845cd3bf41efefabefbe0a47112eef9688349f266323"} err="failed to get container status \"3c33700b94332d7eb85f845cd3bf41efefabefbe0a47112eef9688349f266323\": rpc error: code = NotFound desc = could not find container \"3c33700b94332d7eb85f845cd3bf41efefabefbe0a47112eef9688349f266323\": container with ID starting with 3c33700b94332d7eb85f845cd3bf41efefabefbe0a47112eef9688349f266323 not found: ID does not exist" Dec 05 17:59:34 crc kubenswrapper[4961]: I1205 17:59:34.878182 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="182ce996-1788-47be-b510-d07e7687d62e" path="/var/lib/kubelet/pods/182ce996-1788-47be-b510-d07e7687d62e/volumes" Dec 05 17:59:41 crc kubenswrapper[4961]: I1205 17:59:41.863986 4961 scope.go:117] "RemoveContainer" containerID="d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9" Dec 05 17:59:41 crc kubenswrapper[4961]: E1205 17:59:41.864819 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 17:59:55 crc kubenswrapper[4961]: I1205 17:59:55.863584 4961 scope.go:117] "RemoveContainer" containerID="d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9" Dec 05 17:59:55 crc kubenswrapper[4961]: E1205 17:59:55.864409 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 17:59:58 crc kubenswrapper[4961]: I1205 17:59:58.058593 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-create-9jlnd"] Dec 05 17:59:58 crc kubenswrapper[4961]: I1205 17:59:58.071904 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-3f8b-account-create-update-p894d"] Dec 05 17:59:58 crc kubenswrapper[4961]: I1205 17:59:58.087952 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-4c80-account-create-update-rkrgv"] Dec 05 17:59:58 crc kubenswrapper[4961]: I1205 17:59:58.098497 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-create-qn94r"] Dec 05 17:59:58 crc kubenswrapper[4961]: I1205 17:59:58.107097 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-create-9jlnd"] Dec 05 17:59:58 crc kubenswrapper[4961]: I1205 17:59:58.115130 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-cf7d-account-create-update-ql94j"] Dec 05 
17:59:58 crc kubenswrapper[4961]: I1205 17:59:58.124344 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-create-c45bb"] Dec 05 17:59:58 crc kubenswrapper[4961]: I1205 17:59:58.132843 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-3f8b-account-create-update-p894d"] Dec 05 17:59:58 crc kubenswrapper[4961]: I1205 17:59:58.141261 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-create-qn94r"] Dec 05 17:59:58 crc kubenswrapper[4961]: I1205 17:59:58.150412 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-cf7d-account-create-update-ql94j"] Dec 05 17:59:58 crc kubenswrapper[4961]: I1205 17:59:58.160448 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-create-c45bb"] Dec 05 17:59:58 crc kubenswrapper[4961]: I1205 17:59:58.170530 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-4c80-account-create-update-rkrgv"] Dec 05 17:59:58 crc kubenswrapper[4961]: I1205 17:59:58.875065 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3c4069b0-74bf-4e21-a280-ae69cfcbcd9a" path="/var/lib/kubelet/pods/3c4069b0-74bf-4e21-a280-ae69cfcbcd9a/volumes" Dec 05 17:59:58 crc kubenswrapper[4961]: I1205 17:59:58.876079 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cbe3354-8dc0-4907-b2b3-38a260064997" path="/var/lib/kubelet/pods/3cbe3354-8dc0-4907-b2b3-38a260064997/volumes" Dec 05 17:59:58 crc kubenswrapper[4961]: I1205 17:59:58.876948 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6" path="/var/lib/kubelet/pods/5c4d7c4a-2b2b-4b3a-934d-d6e847eed0d6/volumes" Dec 05 17:59:58 crc kubenswrapper[4961]: I1205 17:59:58.877655 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="70c92280-ca86-4655-bd02-85c60adfe674" path="/var/lib/kubelet/pods/70c92280-ca86-4655-bd02-85c60adfe674/volumes" Dec 05 17:59:58 crc kubenswrapper[4961]: I1205 17:59:58.878955 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d90d35e5-699e-4f11-8f43-97c60a4364d9" path="/var/lib/kubelet/pods/d90d35e5-699e-4f11-8f43-97c60a4364d9/volumes" Dec 05 17:59:58 crc kubenswrapper[4961]: I1205 17:59:58.879588 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f2b416cd-22e0-4619-9fcf-c8329062a46a" path="/var/lib/kubelet/pods/f2b416cd-22e0-4619-9fcf-c8329062a46a/volumes" Dec 05 18:00:00 crc kubenswrapper[4961]: I1205 18:00:00.175570 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415960-7rgnh"] Dec 05 18:00:00 crc kubenswrapper[4961]: E1205 18:00:00.176087 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="182ce996-1788-47be-b510-d07e7687d62e" containerName="registry-server" Dec 05 18:00:00 crc kubenswrapper[4961]: I1205 18:00:00.176104 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="182ce996-1788-47be-b510-d07e7687d62e" containerName="registry-server" Dec 05 18:00:00 crc kubenswrapper[4961]: E1205 18:00:00.176116 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c049cf48-b580-4956-96fd-c9bac1884ddd" containerName="extract-content" Dec 05 18:00:00 crc kubenswrapper[4961]: I1205 18:00:00.176124 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="c049cf48-b580-4956-96fd-c9bac1884ddd" containerName="extract-content" Dec 05 18:00:00 crc kubenswrapper[4961]: E1205 18:00:00.176146 4961 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c049cf48-b580-4956-96fd-c9bac1884ddd" containerName="extract-utilities" Dec 05 18:00:00 crc kubenswrapper[4961]: I1205 18:00:00.176155 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="c049cf48-b580-4956-96fd-c9bac1884ddd" containerName="extract-utilities" Dec 05 18:00:00 crc kubenswrapper[4961]: E1205 18:00:00.176175 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="182ce996-1788-47be-b510-d07e7687d62e" containerName="extract-content" Dec 05 18:00:00 crc kubenswrapper[4961]: I1205 18:00:00.176184 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="182ce996-1788-47be-b510-d07e7687d62e" containerName="extract-content" Dec 05 18:00:00 crc kubenswrapper[4961]: E1205 18:00:00.176210 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="182ce996-1788-47be-b510-d07e7687d62e" containerName="extract-utilities" Dec 05 18:00:00 crc kubenswrapper[4961]: I1205 18:00:00.176218 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="182ce996-1788-47be-b510-d07e7687d62e" containerName="extract-utilities" Dec 05 18:00:00 crc kubenswrapper[4961]: E1205 18:00:00.176228 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c049cf48-b580-4956-96fd-c9bac1884ddd" containerName="registry-server" Dec 05 18:00:00 crc kubenswrapper[4961]: I1205 18:00:00.176237 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="c049cf48-b580-4956-96fd-c9bac1884ddd" containerName="registry-server" Dec 05 18:00:00 crc kubenswrapper[4961]: I1205 18:00:00.176458 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="182ce996-1788-47be-b510-d07e7687d62e" containerName="registry-server" Dec 05 18:00:00 crc kubenswrapper[4961]: I1205 18:00:00.176490 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="c049cf48-b580-4956-96fd-c9bac1884ddd" containerName="registry-server" Dec 05 18:00:00 crc kubenswrapper[4961]: I1205 18:00:00.180454 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-7rgnh"
Dec 05 18:00:00 crc kubenswrapper[4961]: I1205 18:00:00.186190 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 05 18:00:00 crc kubenswrapper[4961]: I1205 18:00:00.186225 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 05 18:00:00 crc kubenswrapper[4961]: I1205 18:00:00.187534 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415960-7rgnh"]
Dec 05 18:00:00 crc kubenswrapper[4961]: I1205 18:00:00.347124 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ceb51243-ac57-4363-b900-2f493c6a526a-secret-volume\") pod \"collect-profiles-29415960-7rgnh\" (UID: \"ceb51243-ac57-4363-b900-2f493c6a526a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-7rgnh"
Dec 05 18:00:00 crc kubenswrapper[4961]: I1205 18:00:00.347263 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ceb51243-ac57-4363-b900-2f493c6a526a-config-volume\") pod \"collect-profiles-29415960-7rgnh\" (UID: \"ceb51243-ac57-4363-b900-2f493c6a526a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-7rgnh"
Dec 05 18:00:00 crc kubenswrapper[4961]: I1205 18:00:00.347338 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-glhr7\" (UniqueName: \"kubernetes.io/projected/ceb51243-ac57-4363-b900-2f493c6a526a-kube-api-access-glhr7\") pod \"collect-profiles-29415960-7rgnh\" (UID: \"ceb51243-ac57-4363-b900-2f493c6a526a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-7rgnh"
Dec 05 18:00:00 crc kubenswrapper[4961]: I1205 18:00:00.449480 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ceb51243-ac57-4363-b900-2f493c6a526a-config-volume\") pod \"collect-profiles-29415960-7rgnh\" (UID: \"ceb51243-ac57-4363-b900-2f493c6a526a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-7rgnh"
Dec 05 18:00:00 crc kubenswrapper[4961]: I1205 18:00:00.449573 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-glhr7\" (UniqueName: \"kubernetes.io/projected/ceb51243-ac57-4363-b900-2f493c6a526a-kube-api-access-glhr7\") pod \"collect-profiles-29415960-7rgnh\" (UID: \"ceb51243-ac57-4363-b900-2f493c6a526a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-7rgnh"
Dec 05 18:00:00 crc kubenswrapper[4961]: I1205 18:00:00.449706 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ceb51243-ac57-4363-b900-2f493c6a526a-secret-volume\") pod \"collect-profiles-29415960-7rgnh\" (UID: \"ceb51243-ac57-4363-b900-2f493c6a526a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-7rgnh"
Dec 05 18:00:00 crc kubenswrapper[4961]: I1205 18:00:00.450502 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ceb51243-ac57-4363-b900-2f493c6a526a-config-volume\") pod \"collect-profiles-29415960-7rgnh\" (UID: \"ceb51243-ac57-4363-b900-2f493c6a526a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-7rgnh"
Dec 05 18:00:00 crc kubenswrapper[4961]: I1205 18:00:00.459475 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ceb51243-ac57-4363-b900-2f493c6a526a-secret-volume\") pod \"collect-profiles-29415960-7rgnh\" (UID: \"ceb51243-ac57-4363-b900-2f493c6a526a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-7rgnh"
Dec 05 18:00:00 crc kubenswrapper[4961]: I1205 18:00:00.468955 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-glhr7\" (UniqueName: \"kubernetes.io/projected/ceb51243-ac57-4363-b900-2f493c6a526a-kube-api-access-glhr7\") pod \"collect-profiles-29415960-7rgnh\" (UID: \"ceb51243-ac57-4363-b900-2f493c6a526a\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-7rgnh"
Dec 05 18:00:00 crc kubenswrapper[4961]: I1205 18:00:00.503620 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-7rgnh"
Dec 05 18:00:00 crc kubenswrapper[4961]: I1205 18:00:00.983726 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415960-7rgnh"]
Dec 05 18:00:01 crc kubenswrapper[4961]: I1205 18:00:01.833579 4961 generic.go:334] "Generic (PLEG): container finished" podID="ceb51243-ac57-4363-b900-2f493c6a526a" containerID="914cc5ec897e30f8173e466106c4a3524d3dbb65e5bc2dfbde6cf03661ab3d5c" exitCode=0
Dec 05 18:00:01 crc kubenswrapper[4961]: I1205 18:00:01.833683 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-7rgnh" event={"ID":"ceb51243-ac57-4363-b900-2f493c6a526a","Type":"ContainerDied","Data":"914cc5ec897e30f8173e466106c4a3524d3dbb65e5bc2dfbde6cf03661ab3d5c"}
Dec 05 18:00:01 crc kubenswrapper[4961]: I1205 18:00:01.833982 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-7rgnh" event={"ID":"ceb51243-ac57-4363-b900-2f493c6a526a","Type":"ContainerStarted","Data":"f5011e91ec238f273585b61e13340cde853ceb637f0009eab2501b0b9f184abc"}
Dec 05 18:00:03 crc kubenswrapper[4961]: I1205 18:00:03.207125 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-7rgnh"
Dec 05 18:00:03 crc kubenswrapper[4961]: I1205 18:00:03.307889 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ceb51243-ac57-4363-b900-2f493c6a526a-secret-volume\") pod \"ceb51243-ac57-4363-b900-2f493c6a526a\" (UID: \"ceb51243-ac57-4363-b900-2f493c6a526a\") "
Dec 05 18:00:03 crc kubenswrapper[4961]: I1205 18:00:03.308131 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-glhr7\" (UniqueName: \"kubernetes.io/projected/ceb51243-ac57-4363-b900-2f493c6a526a-kube-api-access-glhr7\") pod \"ceb51243-ac57-4363-b900-2f493c6a526a\" (UID: \"ceb51243-ac57-4363-b900-2f493c6a526a\") "
Dec 05 18:00:03 crc kubenswrapper[4961]: I1205 18:00:03.308238 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ceb51243-ac57-4363-b900-2f493c6a526a-config-volume\") pod \"ceb51243-ac57-4363-b900-2f493c6a526a\" (UID: \"ceb51243-ac57-4363-b900-2f493c6a526a\") "
Dec 05 18:00:03 crc kubenswrapper[4961]: I1205 18:00:03.309252 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ceb51243-ac57-4363-b900-2f493c6a526a-config-volume" (OuterVolumeSpecName: "config-volume") pod "ceb51243-ac57-4363-b900-2f493c6a526a" (UID: "ceb51243-ac57-4363-b900-2f493c6a526a"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 18:00:03 crc kubenswrapper[4961]: I1205 18:00:03.316052 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ceb51243-ac57-4363-b900-2f493c6a526a-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "ceb51243-ac57-4363-b900-2f493c6a526a" (UID: "ceb51243-ac57-4363-b900-2f493c6a526a"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:00:03 crc kubenswrapper[4961]: I1205 18:00:03.318756 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ceb51243-ac57-4363-b900-2f493c6a526a-kube-api-access-glhr7" (OuterVolumeSpecName: "kube-api-access-glhr7") pod "ceb51243-ac57-4363-b900-2f493c6a526a" (UID: "ceb51243-ac57-4363-b900-2f493c6a526a"). InnerVolumeSpecName "kube-api-access-glhr7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 18:00:03 crc kubenswrapper[4961]: I1205 18:00:03.411228 4961 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/ceb51243-ac57-4363-b900-2f493c6a526a-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 05 18:00:03 crc kubenswrapper[4961]: I1205 18:00:03.411276 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-glhr7\" (UniqueName: \"kubernetes.io/projected/ceb51243-ac57-4363-b900-2f493c6a526a-kube-api-access-glhr7\") on node \"crc\" DevicePath \"\""
Dec 05 18:00:03 crc kubenswrapper[4961]: I1205 18:00:03.411290 4961 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/ceb51243-ac57-4363-b900-2f493c6a526a-config-volume\") on node \"crc\" DevicePath \"\""
Dec 05 18:00:03 crc kubenswrapper[4961]: I1205 18:00:03.855063 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-7rgnh" event={"ID":"ceb51243-ac57-4363-b900-2f493c6a526a","Type":"ContainerDied","Data":"f5011e91ec238f273585b61e13340cde853ceb637f0009eab2501b0b9f184abc"}
Dec 05 18:00:03 crc kubenswrapper[4961]: I1205 18:00:03.855103 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5011e91ec238f273585b61e13340cde853ceb637f0009eab2501b0b9f184abc"
Dec 05 18:00:03 crc kubenswrapper[4961]: I1205 18:00:03.855108 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415960-7rgnh"
Dec 05 18:00:10 crc kubenswrapper[4961]: I1205 18:00:10.864393 4961 scope.go:117] "RemoveContainer" containerID="d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9"
Dec 05 18:00:10 crc kubenswrapper[4961]: E1205 18:00:10.865188 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:00:21 crc kubenswrapper[4961]: I1205 18:00:21.864001 4961 scope.go:117] "RemoveContainer" containerID="d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9"
Dec 05 18:00:21 crc kubenswrapper[4961]: E1205 18:00:21.864921 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:00:33 crc kubenswrapper[4961]: I1205 18:00:33.863068 4961 scope.go:117] "RemoveContainer" containerID="d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9"
Dec 05 18:00:33 crc kubenswrapper[4961]: E1205 18:00:33.863950 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
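[editor's note] The repeating "RemoveContainer" / "Error syncing pod" pairs above recur every 10-15 seconds while the message stays "back-off 5m0s": each pod sync retries the container, finds the crash-loop delay unexpired, and requeues. A minimal Go sketch of the doubling-with-cap schedule follows; the 10s initial delay and 5m cap are assumptions matching the kubelet's documented CrashLoopBackOff behavior, not code from this log's kubelet.

package main

import (
	"fmt"
	"time"
)

func main() {
	delay := 10 * time.Second        // assumed initial crash-loop delay
	const maxDelay = 5 * time.Minute // matches the "back-off 5m0s" in the entries above
	for restart := 1; delay < maxDelay; restart++ {
		fmt.Printf("restart %d: wait %v\n", restart, delay)
		delay *= 2 // double after every failed restart...
		if delay > maxDelay {
			delay = maxDelay // ...but never beyond the cap
		}
	}
	// After a handful of restarts every retry waits the full cap, which is
	// why machine-config-daemon-4vc27 keeps logging the same 5m0s back-off.
	fmt.Println("steady state: every further restart waits", maxDelay)
}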
pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:00:42 crc kubenswrapper[4961]: I1205 18:00:42.592376 4961 scope.go:117] "RemoveContainer" containerID="79f9b3d54473091cb81b6cec9b88dd26027f21f7e9a2be654aaaf8c8c001e420" Dec 05 18:00:42 crc kubenswrapper[4961]: I1205 18:00:42.614945 4961 scope.go:117] "RemoveContainer" containerID="6083168115dd228e12865685f426a7904025197e64de7785cb9a6bc158c8d2cc" Dec 05 18:00:42 crc kubenswrapper[4961]: I1205 18:00:42.682285 4961 scope.go:117] "RemoveContainer" containerID="3e505e6137404d05c1de363640e5671731743a4a0a1f9dd7b6cacb74b9665149" Dec 05 18:00:42 crc kubenswrapper[4961]: I1205 18:00:42.719808 4961 scope.go:117] "RemoveContainer" containerID="a6370ab9954798b978c7ad927208085500e8292d87a042681373823797fab0a0" Dec 05 18:00:42 crc kubenswrapper[4961]: I1205 18:00:42.757908 4961 scope.go:117] "RemoveContainer" containerID="4ef62857d02f7fd3e385d7897cd58deda78db0018020b80ad34bf47aeb7d058b" Dec 05 18:00:42 crc kubenswrapper[4961]: I1205 18:00:42.805504 4961 scope.go:117] "RemoveContainer" containerID="a6963f6e620d22e7e65c9339a5f343ae74d71bf2bf7e51b4b63fe8fdd72564f8" Dec 05 18:00:46 crc kubenswrapper[4961]: I1205 18:00:46.864195 4961 scope.go:117] "RemoveContainer" containerID="d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9" Dec 05 18:00:46 crc kubenswrapper[4961]: E1205 18:00:46.864777 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:00:48 crc kubenswrapper[4961]: I1205 18:00:48.073410 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-create-wwxhc"] Dec 05 18:00:48 crc kubenswrapper[4961]: I1205 18:00:48.091924 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-bf35-account-create-update-5fmjn"] Dec 05 18:00:48 crc kubenswrapper[4961]: I1205 18:00:48.100561 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-create-ssvs5"] Dec 05 18:00:48 crc kubenswrapper[4961]: I1205 18:00:48.108957 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-07f9-account-create-update-jcqf5"] Dec 05 18:00:48 crc kubenswrapper[4961]: I1205 18:00:48.117865 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-bf35-account-create-update-5fmjn"] Dec 05 18:00:48 crc kubenswrapper[4961]: I1205 18:00:48.125913 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-create-wwxhc"] Dec 05 18:00:48 crc kubenswrapper[4961]: I1205 18:00:48.134785 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-07f9-account-create-update-jcqf5"] Dec 05 18:00:48 crc kubenswrapper[4961]: I1205 18:00:48.143417 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/glance-db-sync-ghqpp"] Dec 05 18:00:48 crc kubenswrapper[4961]: I1205 18:00:48.153997 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-0b2d-account-create-update-89tcd"] Dec 05 18:00:48 crc kubenswrapper[4961]: I1205 18:00:48.163179 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-create-ssvs5"] Dec 05 18:00:48 crc 
kubenswrapper[4961]: I1205 18:00:48.172397 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-create-6txbs"] Dec 05 18:00:48 crc kubenswrapper[4961]: I1205 18:00:48.180107 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/glance-db-sync-ghqpp"] Dec 05 18:00:48 crc kubenswrapper[4961]: I1205 18:00:48.188147 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-0b2d-account-create-update-89tcd"] Dec 05 18:00:48 crc kubenswrapper[4961]: I1205 18:00:48.196031 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-create-6txbs"] Dec 05 18:00:48 crc kubenswrapper[4961]: I1205 18:00:48.873588 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0ec59b22-f0c6-4de1-a447-59eb40a7c89d" path="/var/lib/kubelet/pods/0ec59b22-f0c6-4de1-a447-59eb40a7c89d/volumes" Dec 05 18:00:48 crc kubenswrapper[4961]: I1205 18:00:48.874437 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82" path="/var/lib/kubelet/pods/1980c80e-ddaf-4cc8-84ee-e5a4d6d6cf82/volumes" Dec 05 18:00:48 crc kubenswrapper[4961]: I1205 18:00:48.875122 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2673794d-8d1a-4b28-9988-65c916ae70b0" path="/var/lib/kubelet/pods/2673794d-8d1a-4b28-9988-65c916ae70b0/volumes" Dec 05 18:00:48 crc kubenswrapper[4961]: I1205 18:00:48.875723 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5209837f-e8f0-4db4-ba7d-2af20947be50" path="/var/lib/kubelet/pods/5209837f-e8f0-4db4-ba7d-2af20947be50/volumes" Dec 05 18:00:48 crc kubenswrapper[4961]: I1205 18:00:48.876821 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="919d9712-f294-46aa-95e9-b1c166d3bf5a" path="/var/lib/kubelet/pods/919d9712-f294-46aa-95e9-b1c166d3bf5a/volumes" Dec 05 18:00:48 crc kubenswrapper[4961]: I1205 18:00:48.877347 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9aee3a36-f57d-4c81-9627-df1f4ea436be" path="/var/lib/kubelet/pods/9aee3a36-f57d-4c81-9627-df1f4ea436be/volumes" Dec 05 18:00:48 crc kubenswrapper[4961]: I1205 18:00:48.877866 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea9126a1-ef8a-42ae-a94c-596b287fa74d" path="/var/lib/kubelet/pods/ea9126a1-ef8a-42ae-a94c-596b287fa74d/volumes" Dec 05 18:00:54 crc kubenswrapper[4961]: I1205 18:00:54.057348 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-db-sync-dmt4c"] Dec 05 18:00:54 crc kubenswrapper[4961]: I1205 18:00:54.071139 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-db-sync-dmt4c"] Dec 05 18:00:54 crc kubenswrapper[4961]: I1205 18:00:54.876423 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eaedc2cc-4238-4831-ad1a-1c260e8bec24" path="/var/lib/kubelet/pods/eaedc2cc-4238-4831-ad1a-1c260e8bec24/volumes" Dec 05 18:00:57 crc kubenswrapper[4961]: I1205 18:00:57.407517 4961 generic.go:334] "Generic (PLEG): container finished" podID="c2f474d4-f96a-45cd-9432-b90f703a6b81" containerID="4d47630dfa5e14639b75b2e07af6b48057f68cc28b37faf5c274d82a83274cd7" exitCode=0 Dec 05 18:00:57 crc kubenswrapper[4961]: I1205 18:00:57.407837 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7trn8" event={"ID":"c2f474d4-f96a-45cd-9432-b90f703a6b81","Type":"ContainerDied","Data":"4d47630dfa5e14639b75b2e07af6b48057f68cc28b37faf5c274d82a83274cd7"} Dec 05 
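[editor's note] Each "Cleaned up orphaned pod volumes dir" entry above removes /var/lib/kubelet/pods/<uid>/volumes for a pod UID no longer known to the kubelet, once nothing is mounted there. A minimal sketch of such a sweep, assuming a hypothetical activePods set (this is illustrative, not kubelet_volumes.go):

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	// UIDs of pods the kubelet still manages (assumed input).
	active := map[string]bool{"ceb51243-ac57-4363-b900-2f493c6a526a": true}
	entries, err := os.ReadDir("/var/lib/kubelet/pods")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return
	}
	for _, e := range entries {
		if e.IsDir() && !active[e.Name()] {
			// The real kubelet only deletes after confirming all volumes
			// are unmounted; here we just report the candidate directory.
			fmt.Println("orphaned:", filepath.Join("/var/lib/kubelet/pods", e.Name(), "volumes"))
		}
	}
}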
Dec 05 18:00:58 crc kubenswrapper[4961]: I1205 18:00:58.808967 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7trn8"
Dec 05 18:00:58 crc kubenswrapper[4961]: I1205 18:00:58.832258 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pfhfb\" (UniqueName: \"kubernetes.io/projected/c2f474d4-f96a-45cd-9432-b90f703a6b81-kube-api-access-pfhfb\") pod \"c2f474d4-f96a-45cd-9432-b90f703a6b81\" (UID: \"c2f474d4-f96a-45cd-9432-b90f703a6b81\") "
Dec 05 18:00:58 crc kubenswrapper[4961]: I1205 18:00:58.832365 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c2f474d4-f96a-45cd-9432-b90f703a6b81-ssh-key\") pod \"c2f474d4-f96a-45cd-9432-b90f703a6b81\" (UID: \"c2f474d4-f96a-45cd-9432-b90f703a6b81\") "
Dec 05 18:00:58 crc kubenswrapper[4961]: I1205 18:00:58.832408 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c2f474d4-f96a-45cd-9432-b90f703a6b81-inventory\") pod \"c2f474d4-f96a-45cd-9432-b90f703a6b81\" (UID: \"c2f474d4-f96a-45cd-9432-b90f703a6b81\") "
Dec 05 18:00:58 crc kubenswrapper[4961]: I1205 18:00:58.843812 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2f474d4-f96a-45cd-9432-b90f703a6b81-kube-api-access-pfhfb" (OuterVolumeSpecName: "kube-api-access-pfhfb") pod "c2f474d4-f96a-45cd-9432-b90f703a6b81" (UID: "c2f474d4-f96a-45cd-9432-b90f703a6b81"). InnerVolumeSpecName "kube-api-access-pfhfb". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 18:00:58 crc kubenswrapper[4961]: I1205 18:00:58.864723 4961 scope.go:117] "RemoveContainer" containerID="d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9"
Dec 05 18:00:58 crc kubenswrapper[4961]: E1205 18:00:58.865014 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:00:58 crc kubenswrapper[4961]: I1205 18:00:58.867971 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2f474d4-f96a-45cd-9432-b90f703a6b81-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "c2f474d4-f96a-45cd-9432-b90f703a6b81" (UID: "c2f474d4-f96a-45cd-9432-b90f703a6b81"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:00:58 crc kubenswrapper[4961]: I1205 18:00:58.869472 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2f474d4-f96a-45cd-9432-b90f703a6b81-inventory" (OuterVolumeSpecName: "inventory") pod "c2f474d4-f96a-45cd-9432-b90f703a6b81" (UID: "c2f474d4-f96a-45cd-9432-b90f703a6b81"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:00:58 crc kubenswrapper[4961]: I1205 18:00:58.935254 4961 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/c2f474d4-f96a-45cd-9432-b90f703a6b81-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 18:00:58 crc kubenswrapper[4961]: I1205 18:00:58.935289 4961 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/c2f474d4-f96a-45cd-9432-b90f703a6b81-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 18:00:58 crc kubenswrapper[4961]: I1205 18:00:58.935304 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pfhfb\" (UniqueName: \"kubernetes.io/projected/c2f474d4-f96a-45cd-9432-b90f703a6b81-kube-api-access-pfhfb\") on node \"crc\" DevicePath \"\""
Dec 05 18:00:59 crc kubenswrapper[4961]: I1205 18:00:59.431598 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7trn8" event={"ID":"c2f474d4-f96a-45cd-9432-b90f703a6b81","Type":"ContainerDied","Data":"534250acf12712defd93469ba6942c17239d244cd3d40933d9b6942206bf05a6"}
Dec 05 18:00:59 crc kubenswrapper[4961]: I1205 18:00:59.432109 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="534250acf12712defd93469ba6942c17239d244cd3d40933d9b6942206bf05a6"
Dec 05 18:00:59 crc kubenswrapper[4961]: I1205 18:00:59.432233 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/download-cache-edpm-deployment-openstack-edpm-ipam-7trn8"
Dec 05 18:00:59 crc kubenswrapper[4961]: I1205 18:00:59.519828 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2"]
Dec 05 18:00:59 crc kubenswrapper[4961]: E1205 18:00:59.520325 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2f474d4-f96a-45cd-9432-b90f703a6b81" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Dec 05 18:00:59 crc kubenswrapper[4961]: I1205 18:00:59.520346 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2f474d4-f96a-45cd-9432-b90f703a6b81" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Dec 05 18:00:59 crc kubenswrapper[4961]: E1205 18:00:59.520382 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ceb51243-ac57-4363-b900-2f493c6a526a" containerName="collect-profiles"
Dec 05 18:00:59 crc kubenswrapper[4961]: I1205 18:00:59.520392 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="ceb51243-ac57-4363-b900-2f493c6a526a" containerName="collect-profiles"
Dec 05 18:00:59 crc kubenswrapper[4961]: I1205 18:00:59.520657 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="ceb51243-ac57-4363-b900-2f493c6a526a" containerName="collect-profiles"
Dec 05 18:00:59 crc kubenswrapper[4961]: I1205 18:00:59.520698 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2f474d4-f96a-45cd-9432-b90f703a6b81" containerName="download-cache-edpm-deployment-openstack-edpm-ipam"
Dec 05 18:00:59 crc kubenswrapper[4961]: I1205 18:00:59.521509 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2"
Dec 05 18:00:59 crc kubenswrapper[4961]: I1205 18:00:59.525561 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 18:00:59 crc kubenswrapper[4961]: I1205 18:00:59.526018 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rhbf4"
Dec 05 18:00:59 crc kubenswrapper[4961]: I1205 18:00:59.526242 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 18:00:59 crc kubenswrapper[4961]: I1205 18:00:59.526391 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 18:00:59 crc kubenswrapper[4961]: I1205 18:00:59.533397 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2"]
Dec 05 18:00:59 crc kubenswrapper[4961]: I1205 18:00:59.649436 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d692q\" (UniqueName: \"kubernetes.io/projected/8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa-kube-api-access-d692q\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2\" (UID: \"8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2"
Dec 05 18:00:59 crc kubenswrapper[4961]: I1205 18:00:59.649486 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2\" (UID: \"8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2"
Dec 05 18:00:59 crc kubenswrapper[4961]: I1205 18:00:59.650172 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2\" (UID: \"8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2"
Dec 05 18:00:59 crc kubenswrapper[4961]: I1205 18:00:59.752063 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2\" (UID: \"8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2"
Dec 05 18:00:59 crc kubenswrapper[4961]: I1205 18:00:59.752230 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d692q\" (UniqueName: \"kubernetes.io/projected/8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa-kube-api-access-d692q\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2\" (UID: \"8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2"
Dec 05 18:00:59 crc kubenswrapper[4961]: I1205 18:00:59.752292 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2\" (UID: \"8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2"
Dec 05 18:00:59 crc kubenswrapper[4961]: I1205 18:00:59.757252 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa-inventory\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2\" (UID: \"8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2"
Dec 05 18:00:59 crc kubenswrapper[4961]: I1205 18:00:59.764176 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa-ssh-key\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2\" (UID: \"8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2"
Dec 05 18:00:59 crc kubenswrapper[4961]: I1205 18:00:59.768311 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d692q\" (UniqueName: \"kubernetes.io/projected/8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa-kube-api-access-d692q\") pod \"configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2\" (UID: \"8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa\") " pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2"
Dec 05 18:00:59 crc kubenswrapper[4961]: I1205 18:00:59.853003 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2"
Dec 05 18:01:00 crc kubenswrapper[4961]: I1205 18:01:00.135511 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/keystone-cron-29415961-66rdg"]
Dec 05 18:01:00 crc kubenswrapper[4961]: I1205 18:01:00.137890 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415961-66rdg"
Dec 05 18:01:00 crc kubenswrapper[4961]: I1205 18:01:00.148593 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29415961-66rdg"]
Dec 05 18:01:00 crc kubenswrapper[4961]: I1205 18:01:00.263385 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1f248e91-833b-4136-ad16-f32f9aff9513-fernet-keys\") pod \"keystone-cron-29415961-66rdg\" (UID: \"1f248e91-833b-4136-ad16-f32f9aff9513\") " pod="openstack/keystone-cron-29415961-66rdg"
Dec 05 18:01:00 crc kubenswrapper[4961]: I1205 18:01:00.263696 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f248e91-833b-4136-ad16-f32f9aff9513-config-data\") pod \"keystone-cron-29415961-66rdg\" (UID: \"1f248e91-833b-4136-ad16-f32f9aff9513\") " pod="openstack/keystone-cron-29415961-66rdg"
Dec 05 18:01:00 crc kubenswrapper[4961]: I1205 18:01:00.263746 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f248e91-833b-4136-ad16-f32f9aff9513-combined-ca-bundle\") pod \"keystone-cron-29415961-66rdg\" (UID: \"1f248e91-833b-4136-ad16-f32f9aff9513\") " pod="openstack/keystone-cron-29415961-66rdg"
Dec 05 18:01:00 crc kubenswrapper[4961]: I1205 18:01:00.263856 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vwgdc\" (UniqueName: \"kubernetes.io/projected/1f248e91-833b-4136-ad16-f32f9aff9513-kube-api-access-vwgdc\") pod \"keystone-cron-29415961-66rdg\" (UID: \"1f248e91-833b-4136-ad16-f32f9aff9513\") " pod="openstack/keystone-cron-29415961-66rdg"
Dec 05 18:01:00 crc kubenswrapper[4961]: I1205 18:01:00.365482 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f248e91-833b-4136-ad16-f32f9aff9513-config-data\") pod \"keystone-cron-29415961-66rdg\" (UID: \"1f248e91-833b-4136-ad16-f32f9aff9513\") " pod="openstack/keystone-cron-29415961-66rdg"
Dec 05 18:01:00 crc kubenswrapper[4961]: I1205 18:01:00.365523 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f248e91-833b-4136-ad16-f32f9aff9513-combined-ca-bundle\") pod \"keystone-cron-29415961-66rdg\" (UID: \"1f248e91-833b-4136-ad16-f32f9aff9513\") " pod="openstack/keystone-cron-29415961-66rdg"
Dec 05 18:01:00 crc kubenswrapper[4961]: I1205 18:01:00.365549 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vwgdc\" (UniqueName: \"kubernetes.io/projected/1f248e91-833b-4136-ad16-f32f9aff9513-kube-api-access-vwgdc\") pod \"keystone-cron-29415961-66rdg\" (UID: \"1f248e91-833b-4136-ad16-f32f9aff9513\") " pod="openstack/keystone-cron-29415961-66rdg"
Dec 05 18:01:00 crc kubenswrapper[4961]: I1205 18:01:00.365613 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1f248e91-833b-4136-ad16-f32f9aff9513-fernet-keys\") pod \"keystone-cron-29415961-66rdg\" (UID: \"1f248e91-833b-4136-ad16-f32f9aff9513\") " pod="openstack/keystone-cron-29415961-66rdg"
Dec 05 18:01:00 crc kubenswrapper[4961]: I1205 18:01:00.372639 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f248e91-833b-4136-ad16-f32f9aff9513-combined-ca-bundle\") pod \"keystone-cron-29415961-66rdg\" (UID: \"1f248e91-833b-4136-ad16-f32f9aff9513\") " pod="openstack/keystone-cron-29415961-66rdg"
Dec 05 18:01:00 crc kubenswrapper[4961]: I1205 18:01:00.373476 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f248e91-833b-4136-ad16-f32f9aff9513-config-data\") pod \"keystone-cron-29415961-66rdg\" (UID: \"1f248e91-833b-4136-ad16-f32f9aff9513\") " pod="openstack/keystone-cron-29415961-66rdg"
Dec 05 18:01:00 crc kubenswrapper[4961]: I1205 18:01:00.380088 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1f248e91-833b-4136-ad16-f32f9aff9513-fernet-keys\") pod \"keystone-cron-29415961-66rdg\" (UID: \"1f248e91-833b-4136-ad16-f32f9aff9513\") " pod="openstack/keystone-cron-29415961-66rdg"
Dec 05 18:01:00 crc kubenswrapper[4961]: I1205 18:01:00.385039 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vwgdc\" (UniqueName: \"kubernetes.io/projected/1f248e91-833b-4136-ad16-f32f9aff9513-kube-api-access-vwgdc\") pod \"keystone-cron-29415961-66rdg\" (UID: \"1f248e91-833b-4136-ad16-f32f9aff9513\") " pod="openstack/keystone-cron-29415961-66rdg"
Dec 05 18:01:00 crc kubenswrapper[4961]: I1205 18:01:00.421883 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2"]
Dec 05 18:01:00 crc kubenswrapper[4961]: I1205 18:01:00.442408 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2" event={"ID":"8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa","Type":"ContainerStarted","Data":"d42bce013dd052b8c5ea87a4ddae10f5d03ead3fe1687506bfb0a8db90bf785d"}
Dec 05 18:01:00 crc kubenswrapper[4961]: I1205 18:01:00.467365 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415961-66rdg"
Dec 05 18:01:00 crc kubenswrapper[4961]: W1205 18:01:00.889441 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1f248e91_833b_4136_ad16_f32f9aff9513.slice/crio-5d7025076f328840f42e256e845fd146982cc0a98ca29c0a3842ce0ef205e494 WatchSource:0}: Error finding container 5d7025076f328840f42e256e845fd146982cc0a98ca29c0a3842ce0ef205e494: Status 404 returned error can't find the container with id 5d7025076f328840f42e256e845fd146982cc0a98ca29c0a3842ce0ef205e494
Dec 05 18:01:00 crc kubenswrapper[4961]: I1205 18:01:00.894539 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/keystone-cron-29415961-66rdg"]
Dec 05 18:01:01 crc kubenswrapper[4961]: I1205 18:01:01.452720 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2" event={"ID":"8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa","Type":"ContainerStarted","Data":"36919b3e6f4438c91a79ab7ef9727a4e05aa2ff0bc84352326f8d56194f90104"}
Dec 05 18:01:01 crc kubenswrapper[4961]: I1205 18:01:01.454406 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415961-66rdg" event={"ID":"1f248e91-833b-4136-ad16-f32f9aff9513","Type":"ContainerStarted","Data":"9a83aa4aad94983cb4416debd0821a98ccd8dad3b9c90c8f50c3e283bb8f93a2"}
Dec 05 18:01:01 crc kubenswrapper[4961]: I1205 18:01:01.454458 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415961-66rdg" event={"ID":"1f248e91-833b-4136-ad16-f32f9aff9513","Type":"ContainerStarted","Data":"5d7025076f328840f42e256e845fd146982cc0a98ca29c0a3842ce0ef205e494"}
Dec 05 18:01:01 crc kubenswrapper[4961]: I1205 18:01:01.483719 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2" podStartSLOduration=2.025890109 podStartE2EDuration="2.483692456s" podCreationTimestamp="2025-12-05 18:00:59 +0000 UTC" firstStartedPulling="2025-12-05 18:01:00.42955309 +0000 UTC m=+1666.490703563" lastFinishedPulling="2025-12-05 18:01:00.887355437 +0000 UTC m=+1666.948505910" observedRunningTime="2025-12-05 18:01:01.466304684 +0000 UTC m=+1667.527455167" watchObservedRunningTime="2025-12-05 18:01:01.483692456 +0000 UTC m=+1667.544842929"
Dec 05 18:01:01 crc kubenswrapper[4961]: I1205 18:01:01.491626 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/keystone-cron-29415961-66rdg" podStartSLOduration=1.491608663 podStartE2EDuration="1.491608663s" podCreationTimestamp="2025-12-05 18:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 18:01:01.490467385 +0000 UTC m=+1667.551617858" watchObservedRunningTime="2025-12-05 18:01:01.491608663 +0000 UTC m=+1667.552759136"
Dec 05 18:01:03 crc kubenswrapper[4961]: I1205 18:01:03.471914 4961 generic.go:334] "Generic (PLEG): container finished" podID="1f248e91-833b-4136-ad16-f32f9aff9513" containerID="9a83aa4aad94983cb4416debd0821a98ccd8dad3b9c90c8f50c3e283bb8f93a2" exitCode=0
Dec 05 18:01:03 crc kubenswrapper[4961]: I1205 18:01:03.471986 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415961-66rdg" event={"ID":"1f248e91-833b-4136-ad16-f32f9aff9513","Type":"ContainerDied","Data":"9a83aa4aad94983cb4416debd0821a98ccd8dad3b9c90c8f50c3e283bb8f93a2"}
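[editor's note] The two "Observed pod startup duration" entries above decompose cleanly: podStartSLOduration equals podStartE2EDuration minus the image-pull window (lastFinishedPulling - firstStartedPulling), which is why keystone-cron-29415961-66rdg, with zero-value pull timestamps, has SLO == E2E. A short Go check against the configure-network pod's own numbers (the layout string is the standard Go time.String() format these timestamps use):

package main

import (
	"fmt"
	"time"
)

func main() {
	parse := func(s string) time.Time {
		t, err := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST", s)
		if err != nil {
			panic(err)
		}
		return t
	}
	// Values copied from the pod_startup_latency_tracker entry above.
	firstStartedPulling := parse("2025-12-05 18:01:00.42955309 +0000 UTC")
	lastFinishedPulling := parse("2025-12-05 18:01:00.887355437 +0000 UTC")
	e2e := 2483692456 * time.Nanosecond // podStartE2EDuration="2.483692456s"
	slo := e2e - lastFinishedPulling.Sub(firstStartedPulling)
	fmt.Println(slo.Seconds()) // prints 2.025890109, matching podStartSLOduration
}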
Dec 05 18:01:04 crc kubenswrapper[4961]: I1205 18:01:04.822592 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415961-66rdg"
Dec 05 18:01:04 crc kubenswrapper[4961]: I1205 18:01:04.952466 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f248e91-833b-4136-ad16-f32f9aff9513-config-data\") pod \"1f248e91-833b-4136-ad16-f32f9aff9513\" (UID: \"1f248e91-833b-4136-ad16-f32f9aff9513\") "
Dec 05 18:01:04 crc kubenswrapper[4961]: I1205 18:01:04.952866 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f248e91-833b-4136-ad16-f32f9aff9513-combined-ca-bundle\") pod \"1f248e91-833b-4136-ad16-f32f9aff9513\" (UID: \"1f248e91-833b-4136-ad16-f32f9aff9513\") "
Dec 05 18:01:04 crc kubenswrapper[4961]: I1205 18:01:04.952991 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vwgdc\" (UniqueName: \"kubernetes.io/projected/1f248e91-833b-4136-ad16-f32f9aff9513-kube-api-access-vwgdc\") pod \"1f248e91-833b-4136-ad16-f32f9aff9513\" (UID: \"1f248e91-833b-4136-ad16-f32f9aff9513\") "
Dec 05 18:01:04 crc kubenswrapper[4961]: I1205 18:01:04.953147 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1f248e91-833b-4136-ad16-f32f9aff9513-fernet-keys\") pod \"1f248e91-833b-4136-ad16-f32f9aff9513\" (UID: \"1f248e91-833b-4136-ad16-f32f9aff9513\") "
Dec 05 18:01:04 crc kubenswrapper[4961]: I1205 18:01:04.957647 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1f248e91-833b-4136-ad16-f32f9aff9513-kube-api-access-vwgdc" (OuterVolumeSpecName: "kube-api-access-vwgdc") pod "1f248e91-833b-4136-ad16-f32f9aff9513" (UID: "1f248e91-833b-4136-ad16-f32f9aff9513"). InnerVolumeSpecName "kube-api-access-vwgdc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 18:01:04 crc kubenswrapper[4961]: I1205 18:01:04.957703 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f248e91-833b-4136-ad16-f32f9aff9513-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "1f248e91-833b-4136-ad16-f32f9aff9513" (UID: "1f248e91-833b-4136-ad16-f32f9aff9513"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:01:04 crc kubenswrapper[4961]: I1205 18:01:04.989664 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f248e91-833b-4136-ad16-f32f9aff9513-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "1f248e91-833b-4136-ad16-f32f9aff9513" (UID: "1f248e91-833b-4136-ad16-f32f9aff9513"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:01:05 crc kubenswrapper[4961]: I1205 18:01:05.004945 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1f248e91-833b-4136-ad16-f32f9aff9513-config-data" (OuterVolumeSpecName: "config-data") pod "1f248e91-833b-4136-ad16-f32f9aff9513" (UID: "1f248e91-833b-4136-ad16-f32f9aff9513"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:01:05 crc kubenswrapper[4961]: I1205 18:01:05.055521 4961 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/1f248e91-833b-4136-ad16-f32f9aff9513-fernet-keys\") on node \"crc\" DevicePath \"\""
Dec 05 18:01:05 crc kubenswrapper[4961]: I1205 18:01:05.055562 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1f248e91-833b-4136-ad16-f32f9aff9513-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 18:01:05 crc kubenswrapper[4961]: I1205 18:01:05.055575 4961 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/1f248e91-833b-4136-ad16-f32f9aff9513-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 18:01:05 crc kubenswrapper[4961]: I1205 18:01:05.055588 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vwgdc\" (UniqueName: \"kubernetes.io/projected/1f248e91-833b-4136-ad16-f32f9aff9513-kube-api-access-vwgdc\") on node \"crc\" DevicePath \"\""
Dec 05 18:01:05 crc kubenswrapper[4961]: I1205 18:01:05.494648 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/keystone-cron-29415961-66rdg" event={"ID":"1f248e91-833b-4136-ad16-f32f9aff9513","Type":"ContainerDied","Data":"5d7025076f328840f42e256e845fd146982cc0a98ca29c0a3842ce0ef205e494"}
Dec 05 18:01:05 crc kubenswrapper[4961]: I1205 18:01:05.494684 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5d7025076f328840f42e256e845fd146982cc0a98ca29c0a3842ce0ef205e494"
Dec 05 18:01:05 crc kubenswrapper[4961]: I1205 18:01:05.494725 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/keystone-cron-29415961-66rdg"
Dec 05 18:01:12 crc kubenswrapper[4961]: I1205 18:01:12.864469 4961 scope.go:117] "RemoveContainer" containerID="d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9"
Dec 05 18:01:12 crc kubenswrapper[4961]: E1205 18:01:12.865425 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:01:27 crc kubenswrapper[4961]: I1205 18:01:27.045235 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/neutron-db-sync-sd82p"]
Dec 05 18:01:27 crc kubenswrapper[4961]: I1205 18:01:27.061655 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/neutron-db-sync-sd82p"]
Dec 05 18:01:27 crc kubenswrapper[4961]: I1205 18:01:27.863320 4961 scope.go:117] "RemoveContainer" containerID="d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9"
Dec 05 18:01:27 crc kubenswrapper[4961]: E1205 18:01:27.863873 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:01:28 crc kubenswrapper[4961]: I1205 18:01:28.885576 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b58877bc-157d-4919-9418-e5b306dff028" path="/var/lib/kubelet/pods/b58877bc-157d-4919-9418-e5b306dff028/volumes"
Dec 05 18:01:31 crc kubenswrapper[4961]: I1205 18:01:31.047698 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/placement-db-sync-k7c2f"]
Dec 05 18:01:31 crc kubenswrapper[4961]: I1205 18:01:31.059258 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/placement-db-sync-k7c2f"]
Dec 05 18:01:32 crc kubenswrapper[4961]: I1205 18:01:32.873449 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="125fc58a-f251-4b81-98e7-eca6a2c72a8e" path="/var/lib/kubelet/pods/125fc58a-f251-4b81-98e7-eca6a2c72a8e/volumes"
Dec 05 18:01:41 crc kubenswrapper[4961]: I1205 18:01:41.034966 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/keystone-bootstrap-vlqz4"]
Dec 05 18:01:41 crc kubenswrapper[4961]: I1205 18:01:41.047572 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/barbican-db-sync-9q2wd"]
Dec 05 18:01:41 crc kubenswrapper[4961]: I1205 18:01:41.057677 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/keystone-bootstrap-vlqz4"]
Dec 05 18:01:41 crc kubenswrapper[4961]: I1205 18:01:41.067435 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/barbican-db-sync-9q2wd"]
Dec 05 18:01:41 crc kubenswrapper[4961]: I1205 18:01:41.863946 4961 scope.go:117] "RemoveContainer" containerID="d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9"
Dec 05 18:01:41 crc kubenswrapper[4961]: E1205 18:01:41.864532 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:01:42 crc kubenswrapper[4961]: I1205 18:01:42.874913 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2cdd7a2f-7aea-4443-877f-a10bfb0a8512" path="/var/lib/kubelet/pods/2cdd7a2f-7aea-4443-877f-a10bfb0a8512/volumes"
Dec 05 18:01:42 crc kubenswrapper[4961]: I1205 18:01:42.875621 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c3a753a7-88b4-4e0c-a1c6-82e79643c6b0" path="/var/lib/kubelet/pods/c3a753a7-88b4-4e0c-a1c6-82e79643c6b0/volumes"
Dec 05 18:01:42 crc kubenswrapper[4961]: I1205 18:01:42.942325 4961 scope.go:117] "RemoveContainer" containerID="da9a97b4359f1460049f4a5cc7ea0b44b1acfd3a1a9492323cae3d97df6ea6ea"
Dec 05 18:01:42 crc kubenswrapper[4961]: I1205 18:01:42.982368 4961 scope.go:117] "RemoveContainer" containerID="4050ed7379c04d5387701f9805b61b386236cc4ac4ae79db61a625cadb152ff2"
Dec 05 18:01:43 crc kubenswrapper[4961]: I1205 18:01:43.041827 4961 scope.go:117] "RemoveContainer" containerID="c7775548c61a86060018c3fa198cfb2dd6a5afa042f10c3609bc3d1fe578f90c"
Dec 05 18:01:43 crc kubenswrapper[4961]: I1205 18:01:43.080942 4961 scope.go:117] "RemoveContainer" containerID="77b63205c0b9058f2486caf506c53645a7e591ccfeadb412cdd2905f65c6dcd5"
Dec 05 18:01:43 crc kubenswrapper[4961]: I1205 18:01:43.144288 4961 scope.go:117] "RemoveContainer" containerID="22ef03d3a529b02d14b357ac4db14950e604db2b339187b53fdc5824efc75dab"
Dec 05 18:01:43 crc kubenswrapper[4961]: I1205 18:01:43.176719 4961 scope.go:117] "RemoveContainer" containerID="c40f9ceb45ddbcc3087afab86de2d6c09a8b2a30f24fde7d75109bb4222e2c3b"
Dec 05 18:01:43 crc kubenswrapper[4961]: I1205 18:01:43.208056 4961 scope.go:117] "RemoveContainer" containerID="b0f14d72e67dbe1c466aa0b1f46aac1c0805eac023e62263017206534e3d1bbe"
Dec 05 18:01:43 crc kubenswrapper[4961]: I1205 18:01:43.258311 4961 scope.go:117] "RemoveContainer" containerID="392c07a98fdc0c8fc13fd55b762070b990a1ec7f43be6109b96e28a26d14172d"
Dec 05 18:01:43 crc kubenswrapper[4961]: I1205 18:01:43.282379 4961 scope.go:117] "RemoveContainer" containerID="1e81862d6f1a3abea7d143ce5db4b95b941b0aab8e9996f7be4a3c30bc693ed7"
Dec 05 18:01:43 crc kubenswrapper[4961]: I1205 18:01:43.307925 4961 scope.go:117] "RemoveContainer" containerID="ce1d1257db7bf4af890bcf579537afadcddcd5e309fa270bf80f9b5904b3d593"
Dec 05 18:01:43 crc kubenswrapper[4961]: I1205 18:01:43.328525 4961 scope.go:117] "RemoveContainer" containerID="266fab2cad3e0fba693e0eed0aa20a322bbe1c902e28ddd8d9d5bfcd17ebc458"
Dec 05 18:01:43 crc kubenswrapper[4961]: I1205 18:01:43.355364 4961 scope.go:117] "RemoveContainer" containerID="b3ef8b94d3f80ab569bdc0cfc4a932c06ec7a3c41710f129d9a36eb373829953"
Dec 05 18:01:52 crc kubenswrapper[4961]: I1205 18:01:52.864194 4961 scope.go:117] "RemoveContainer" containerID="d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9"
Dec 05 18:01:52 crc kubenswrapper[4961]: E1205 18:01:52.865121 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:01:55 crc kubenswrapper[4961]: I1205 18:01:55.052599 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/cinder-db-sync-pvww2"]
Dec 05 18:01:55 crc kubenswrapper[4961]: I1205 18:01:55.064122 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/cinder-db-sync-pvww2"]
Dec 05 18:01:56 crc kubenswrapper[4961]: I1205 18:01:56.885657 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62cd878c-721b-46b4-87bb-1573a9fcf6d9" path="/var/lib/kubelet/pods/62cd878c-721b-46b4-87bb-1573a9fcf6d9/volumes"
Dec 05 18:02:07 crc kubenswrapper[4961]: I1205 18:02:07.864666 4961 scope.go:117] "RemoveContainer" containerID="d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9"
Dec 05 18:02:07 crc kubenswrapper[4961]: E1205 18:02:07.865967 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:02:14 crc kubenswrapper[4961]: I1205 18:02:14.178838 4961 generic.go:334] "Generic (PLEG): container finished" podID="8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa" containerID="36919b3e6f4438c91a79ab7ef9727a4e05aa2ff0bc84352326f8d56194f90104" exitCode=0
Dec 05 18:02:14 crc kubenswrapper[4961]: I1205 18:02:14.178973 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2" event={"ID":"8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa","Type":"ContainerDied","Data":"36919b3e6f4438c91a79ab7ef9727a4e05aa2ff0bc84352326f8d56194f90104"}
Dec 05 18:02:15 crc kubenswrapper[4961]: I1205 18:02:15.578988 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2"
Dec 05 18:02:15 crc kubenswrapper[4961]: I1205 18:02:15.630427 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa-inventory\") pod \"8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa\" (UID: \"8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa\") "
Dec 05 18:02:15 crc kubenswrapper[4961]: I1205 18:02:15.630989 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d692q\" (UniqueName: \"kubernetes.io/projected/8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa-kube-api-access-d692q\") pod \"8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa\" (UID: \"8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa\") "
Dec 05 18:02:15 crc kubenswrapper[4961]: I1205 18:02:15.631168 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa-ssh-key\") pod \"8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa\" (UID: \"8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa\") "
Dec 05 18:02:15 crc kubenswrapper[4961]: I1205 18:02:15.638016 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa-kube-api-access-d692q" (OuterVolumeSpecName: "kube-api-access-d692q") pod "8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa" (UID: "8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa"). InnerVolumeSpecName "kube-api-access-d692q". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 18:02:15 crc kubenswrapper[4961]: I1205 18:02:15.661921 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa-inventory" (OuterVolumeSpecName: "inventory") pod "8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa" (UID: "8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:02:15 crc kubenswrapper[4961]: I1205 18:02:15.662311 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa" (UID: "8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:02:15 crc kubenswrapper[4961]: I1205 18:02:15.733626 4961 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 18:02:15 crc kubenswrapper[4961]: I1205 18:02:15.733668 4961 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 18:02:15 crc kubenswrapper[4961]: I1205 18:02:15.733682 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d692q\" (UniqueName: \"kubernetes.io/projected/8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa-kube-api-access-d692q\") on node \"crc\" DevicePath \"\"" Dec 05 18:02:16 crc kubenswrapper[4961]: I1205 18:02:16.204310 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2" event={"ID":"8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa","Type":"ContainerDied","Data":"d42bce013dd052b8c5ea87a4ddae10f5d03ead3fe1687506bfb0a8db90bf785d"} Dec 05 18:02:16 crc kubenswrapper[4961]: I1205 18:02:16.204349 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d42bce013dd052b8c5ea87a4ddae10f5d03ead3fe1687506bfb0a8db90bf785d" Dec 05 18:02:16 crc kubenswrapper[4961]: I1205 18:02:16.204805 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2" Dec 05 18:02:16 crc kubenswrapper[4961]: I1205 18:02:16.315200 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn"] Dec 05 18:02:16 crc kubenswrapper[4961]: E1205 18:02:16.315718 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 18:02:16 crc kubenswrapper[4961]: I1205 18:02:16.315740 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 18:02:16 crc kubenswrapper[4961]: E1205 18:02:16.315794 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1f248e91-833b-4136-ad16-f32f9aff9513" containerName="keystone-cron" Dec 05 18:02:16 crc kubenswrapper[4961]: I1205 18:02:16.315800 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="1f248e91-833b-4136-ad16-f32f9aff9513" containerName="keystone-cron" Dec 05 18:02:16 crc kubenswrapper[4961]: I1205 18:02:16.315976 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa" containerName="configure-network-edpm-deployment-openstack-edpm-ipam" Dec 05 18:02:16 crc kubenswrapper[4961]: I1205 18:02:16.315996 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="1f248e91-833b-4136-ad16-f32f9aff9513" containerName="keystone-cron" Dec 05 18:02:16 crc kubenswrapper[4961]: I1205 18:02:16.317059 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn" Dec 05 18:02:16 crc kubenswrapper[4961]: I1205 18:02:16.319882 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 18:02:16 crc kubenswrapper[4961]: I1205 18:02:16.319907 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rhbf4" Dec 05 18:02:16 crc kubenswrapper[4961]: I1205 18:02:16.320167 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 18:02:16 crc kubenswrapper[4961]: I1205 18:02:16.320187 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 18:02:16 crc kubenswrapper[4961]: I1205 18:02:16.346212 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn"] Dec 05 18:02:16 crc kubenswrapper[4961]: I1205 18:02:16.447850 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tb8d2\" (UniqueName: \"kubernetes.io/projected/bed15574-a82b-4a31-baa8-8ddfc4a93972-kube-api-access-tb8d2\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn\" (UID: \"bed15574-a82b-4a31-baa8-8ddfc4a93972\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn" Dec 05 18:02:16 crc kubenswrapper[4961]: I1205 18:02:16.447913 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bed15574-a82b-4a31-baa8-8ddfc4a93972-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn\" (UID: \"bed15574-a82b-4a31-baa8-8ddfc4a93972\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn" Dec 05 18:02:16 crc kubenswrapper[4961]: I1205 18:02:16.448186 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bed15574-a82b-4a31-baa8-8ddfc4a93972-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn\" (UID: \"bed15574-a82b-4a31-baa8-8ddfc4a93972\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn" Dec 05 18:02:16 crc kubenswrapper[4961]: I1205 18:02:16.550941 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bed15574-a82b-4a31-baa8-8ddfc4a93972-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn\" (UID: \"bed15574-a82b-4a31-baa8-8ddfc4a93972\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn" Dec 05 18:02:16 crc kubenswrapper[4961]: I1205 18:02:16.551210 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tb8d2\" (UniqueName: \"kubernetes.io/projected/bed15574-a82b-4a31-baa8-8ddfc4a93972-kube-api-access-tb8d2\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn\" (UID: \"bed15574-a82b-4a31-baa8-8ddfc4a93972\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn" Dec 05 18:02:16 crc kubenswrapper[4961]: I1205 18:02:16.551266 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bed15574-a82b-4a31-baa8-8ddfc4a93972-inventory\") pod 
\"validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn\" (UID: \"bed15574-a82b-4a31-baa8-8ddfc4a93972\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn" Dec 05 18:02:16 crc kubenswrapper[4961]: I1205 18:02:16.559011 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bed15574-a82b-4a31-baa8-8ddfc4a93972-ssh-key\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn\" (UID: \"bed15574-a82b-4a31-baa8-8ddfc4a93972\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn" Dec 05 18:02:16 crc kubenswrapper[4961]: I1205 18:02:16.559044 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bed15574-a82b-4a31-baa8-8ddfc4a93972-inventory\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn\" (UID: \"bed15574-a82b-4a31-baa8-8ddfc4a93972\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn" Dec 05 18:02:16 crc kubenswrapper[4961]: I1205 18:02:16.573426 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tb8d2\" (UniqueName: \"kubernetes.io/projected/bed15574-a82b-4a31-baa8-8ddfc4a93972-kube-api-access-tb8d2\") pod \"validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn\" (UID: \"bed15574-a82b-4a31-baa8-8ddfc4a93972\") " pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn" Dec 05 18:02:16 crc kubenswrapper[4961]: I1205 18:02:16.648451 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn" Dec 05 18:02:17 crc kubenswrapper[4961]: I1205 18:02:17.217666 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn"] Dec 05 18:02:18 crc kubenswrapper[4961]: I1205 18:02:18.230817 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn" event={"ID":"bed15574-a82b-4a31-baa8-8ddfc4a93972","Type":"ContainerStarted","Data":"58663ce9e24fa934f218944d1887eca4794b1843a127f8065c4e1a8d4d9b62e6"} Dec 05 18:02:18 crc kubenswrapper[4961]: I1205 18:02:18.231331 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn" event={"ID":"bed15574-a82b-4a31-baa8-8ddfc4a93972","Type":"ContainerStarted","Data":"20ed6765fff6660f8fcc98bd4e16b6378fa1e94592b8e36e3c4f8be52617b3d8"} Dec 05 18:02:18 crc kubenswrapper[4961]: I1205 18:02:18.255858 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn" podStartSLOduration=1.673905329 podStartE2EDuration="2.255825792s" podCreationTimestamp="2025-12-05 18:02:16 +0000 UTC" firstStartedPulling="2025-12-05 18:02:17.230048753 +0000 UTC m=+1743.291199226" lastFinishedPulling="2025-12-05 18:02:17.811969216 +0000 UTC m=+1743.873119689" observedRunningTime="2025-12-05 18:02:18.247161047 +0000 UTC m=+1744.308311530" watchObservedRunningTime="2025-12-05 18:02:18.255825792 +0000 UTC m=+1744.316976265" Dec 05 18:02:21 crc kubenswrapper[4961]: I1205 18:02:21.863757 4961 scope.go:117] "RemoveContainer" containerID="d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9" Dec 05 18:02:21 crc kubenswrapper[4961]: E1205 18:02:21.864325 4961 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:02:23 crc kubenswrapper[4961]: I1205 18:02:23.281846 4961 generic.go:334] "Generic (PLEG): container finished" podID="bed15574-a82b-4a31-baa8-8ddfc4a93972" containerID="58663ce9e24fa934f218944d1887eca4794b1843a127f8065c4e1a8d4d9b62e6" exitCode=0 Dec 05 18:02:23 crc kubenswrapper[4961]: I1205 18:02:23.281950 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn" event={"ID":"bed15574-a82b-4a31-baa8-8ddfc4a93972","Type":"ContainerDied","Data":"58663ce9e24fa934f218944d1887eca4794b1843a127f8065c4e1a8d4d9b62e6"} Dec 05 18:02:24 crc kubenswrapper[4961]: I1205 18:02:24.700810 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn" Dec 05 18:02:24 crc kubenswrapper[4961]: I1205 18:02:24.856735 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bed15574-a82b-4a31-baa8-8ddfc4a93972-inventory\") pod \"bed15574-a82b-4a31-baa8-8ddfc4a93972\" (UID: \"bed15574-a82b-4a31-baa8-8ddfc4a93972\") " Dec 05 18:02:24 crc kubenswrapper[4961]: I1205 18:02:24.856989 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tb8d2\" (UniqueName: \"kubernetes.io/projected/bed15574-a82b-4a31-baa8-8ddfc4a93972-kube-api-access-tb8d2\") pod \"bed15574-a82b-4a31-baa8-8ddfc4a93972\" (UID: \"bed15574-a82b-4a31-baa8-8ddfc4a93972\") " Dec 05 18:02:24 crc kubenswrapper[4961]: I1205 18:02:24.857023 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bed15574-a82b-4a31-baa8-8ddfc4a93972-ssh-key\") pod \"bed15574-a82b-4a31-baa8-8ddfc4a93972\" (UID: \"bed15574-a82b-4a31-baa8-8ddfc4a93972\") " Dec 05 18:02:24 crc kubenswrapper[4961]: I1205 18:02:24.862445 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bed15574-a82b-4a31-baa8-8ddfc4a93972-kube-api-access-tb8d2" (OuterVolumeSpecName: "kube-api-access-tb8d2") pod "bed15574-a82b-4a31-baa8-8ddfc4a93972" (UID: "bed15574-a82b-4a31-baa8-8ddfc4a93972"). InnerVolumeSpecName "kube-api-access-tb8d2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:02:24 crc kubenswrapper[4961]: I1205 18:02:24.886532 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bed15574-a82b-4a31-baa8-8ddfc4a93972-inventory" (OuterVolumeSpecName: "inventory") pod "bed15574-a82b-4a31-baa8-8ddfc4a93972" (UID: "bed15574-a82b-4a31-baa8-8ddfc4a93972"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:02:24 crc kubenswrapper[4961]: I1205 18:02:24.893125 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bed15574-a82b-4a31-baa8-8ddfc4a93972-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "bed15574-a82b-4a31-baa8-8ddfc4a93972" (UID: "bed15574-a82b-4a31-baa8-8ddfc4a93972"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:02:24 crc kubenswrapper[4961]: I1205 18:02:24.959688 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tb8d2\" (UniqueName: \"kubernetes.io/projected/bed15574-a82b-4a31-baa8-8ddfc4a93972-kube-api-access-tb8d2\") on node \"crc\" DevicePath \"\"" Dec 05 18:02:24 crc kubenswrapper[4961]: I1205 18:02:24.959858 4961 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/bed15574-a82b-4a31-baa8-8ddfc4a93972-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 18:02:24 crc kubenswrapper[4961]: I1205 18:02:24.959869 4961 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/bed15574-a82b-4a31-baa8-8ddfc4a93972-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 18:02:25 crc kubenswrapper[4961]: I1205 18:02:25.305016 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn" event={"ID":"bed15574-a82b-4a31-baa8-8ddfc4a93972","Type":"ContainerDied","Data":"20ed6765fff6660f8fcc98bd4e16b6378fa1e94592b8e36e3c4f8be52617b3d8"} Dec 05 18:02:25 crc kubenswrapper[4961]: I1205 18:02:25.305416 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="20ed6765fff6660f8fcc98bd4e16b6378fa1e94592b8e36e3c4f8be52617b3d8" Dec 05 18:02:25 crc kubenswrapper[4961]: I1205 18:02:25.305077 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn" Dec 05 18:02:25 crc kubenswrapper[4961]: I1205 18:02:25.408239 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-7lbk7"] Dec 05 18:02:25 crc kubenswrapper[4961]: E1205 18:02:25.408635 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bed15574-a82b-4a31-baa8-8ddfc4a93972" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 18:02:25 crc kubenswrapper[4961]: I1205 18:02:25.408660 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="bed15574-a82b-4a31-baa8-8ddfc4a93972" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 18:02:25 crc kubenswrapper[4961]: I1205 18:02:25.408925 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="bed15574-a82b-4a31-baa8-8ddfc4a93972" containerName="validate-network-edpm-deployment-openstack-edpm-ipam" Dec 05 18:02:25 crc kubenswrapper[4961]: I1205 18:02:25.409605 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7lbk7" Dec 05 18:02:25 crc kubenswrapper[4961]: I1205 18:02:25.411800 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 18:02:25 crc kubenswrapper[4961]: I1205 18:02:25.411991 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 18:02:25 crc kubenswrapper[4961]: I1205 18:02:25.412318 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 18:02:25 crc kubenswrapper[4961]: I1205 18:02:25.412552 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rhbf4" Dec 05 18:02:25 crc kubenswrapper[4961]: I1205 18:02:25.424850 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-7lbk7"] Dec 05 18:02:25 crc kubenswrapper[4961]: I1205 18:02:25.571103 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0e516b37-5d75-47c4-af9c-438a41abf158-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-7lbk7\" (UID: \"0e516b37-5d75-47c4-af9c-438a41abf158\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7lbk7" Dec 05 18:02:25 crc kubenswrapper[4961]: I1205 18:02:25.571205 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jtn99\" (UniqueName: \"kubernetes.io/projected/0e516b37-5d75-47c4-af9c-438a41abf158-kube-api-access-jtn99\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-7lbk7\" (UID: \"0e516b37-5d75-47c4-af9c-438a41abf158\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7lbk7" Dec 05 18:02:25 crc kubenswrapper[4961]: I1205 18:02:25.571305 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0e516b37-5d75-47c4-af9c-438a41abf158-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-7lbk7\" (UID: \"0e516b37-5d75-47c4-af9c-438a41abf158\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7lbk7" Dec 05 18:02:25 crc kubenswrapper[4961]: I1205 18:02:25.673322 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0e516b37-5d75-47c4-af9c-438a41abf158-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-7lbk7\" (UID: \"0e516b37-5d75-47c4-af9c-438a41abf158\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7lbk7" Dec 05 18:02:25 crc kubenswrapper[4961]: I1205 18:02:25.673402 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jtn99\" (UniqueName: \"kubernetes.io/projected/0e516b37-5d75-47c4-af9c-438a41abf158-kube-api-access-jtn99\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-7lbk7\" (UID: \"0e516b37-5d75-47c4-af9c-438a41abf158\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7lbk7" Dec 05 18:02:25 crc kubenswrapper[4961]: I1205 18:02:25.673471 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0e516b37-5d75-47c4-af9c-438a41abf158-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-7lbk7\" (UID: 
\"0e516b37-5d75-47c4-af9c-438a41abf158\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7lbk7" Dec 05 18:02:25 crc kubenswrapper[4961]: I1205 18:02:25.677643 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0e516b37-5d75-47c4-af9c-438a41abf158-ssh-key\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-7lbk7\" (UID: \"0e516b37-5d75-47c4-af9c-438a41abf158\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7lbk7" Dec 05 18:02:25 crc kubenswrapper[4961]: I1205 18:02:25.678069 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0e516b37-5d75-47c4-af9c-438a41abf158-inventory\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-7lbk7\" (UID: \"0e516b37-5d75-47c4-af9c-438a41abf158\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7lbk7" Dec 05 18:02:25 crc kubenswrapper[4961]: I1205 18:02:25.693410 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jtn99\" (UniqueName: \"kubernetes.io/projected/0e516b37-5d75-47c4-af9c-438a41abf158-kube-api-access-jtn99\") pod \"install-os-edpm-deployment-openstack-edpm-ipam-7lbk7\" (UID: \"0e516b37-5d75-47c4-af9c-438a41abf158\") " pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7lbk7" Dec 05 18:02:25 crc kubenswrapper[4961]: I1205 18:02:25.730094 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7lbk7" Dec 05 18:02:26 crc kubenswrapper[4961]: I1205 18:02:26.320276 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-os-edpm-deployment-openstack-edpm-ipam-7lbk7"] Dec 05 18:02:27 crc kubenswrapper[4961]: I1205 18:02:27.327981 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7lbk7" event={"ID":"0e516b37-5d75-47c4-af9c-438a41abf158","Type":"ContainerStarted","Data":"0901a5833b9f815631ee2e6318dd23237a7ef02e3adf0e00f8886a38d040269a"} Dec 05 18:02:27 crc kubenswrapper[4961]: I1205 18:02:27.328616 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7lbk7" event={"ID":"0e516b37-5d75-47c4-af9c-438a41abf158","Type":"ContainerStarted","Data":"7add37fd772b9e51e664021a1a6f7334a7b10dbd940f01f8f228c354e7526f81"} Dec 05 18:02:27 crc kubenswrapper[4961]: I1205 18:02:27.359894 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7lbk7" podStartSLOduration=1.903660626 podStartE2EDuration="2.359866628s" podCreationTimestamp="2025-12-05 18:02:25 +0000 UTC" firstStartedPulling="2025-12-05 18:02:26.332703594 +0000 UTC m=+1752.393854067" lastFinishedPulling="2025-12-05 18:02:26.788909596 +0000 UTC m=+1752.850060069" observedRunningTime="2025-12-05 18:02:27.346118007 +0000 UTC m=+1753.407268490" watchObservedRunningTime="2025-12-05 18:02:27.359866628 +0000 UTC m=+1753.421017101" Dec 05 18:02:33 crc kubenswrapper[4961]: I1205 18:02:33.038539 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-6c6c-account-create-update-hsnmn"] Dec 05 18:02:33 crc kubenswrapper[4961]: I1205 18:02:33.051444 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-f95f-account-create-update-bkjnh"] Dec 05 18:02:33 crc kubenswrapper[4961]: I1205 18:02:33.059394 4961 kubelet.go:2431] 
"SyncLoop REMOVE" source="api" pods=["openstack/nova-api-6c6c-account-create-update-hsnmn"] Dec 05 18:02:33 crc kubenswrapper[4961]: I1205 18:02:33.067537 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-f95f-account-create-update-bkjnh"] Dec 05 18:02:33 crc kubenswrapper[4961]: I1205 18:02:33.863989 4961 scope.go:117] "RemoveContainer" containerID="d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9" Dec 05 18:02:33 crc kubenswrapper[4961]: E1205 18:02:33.864331 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:02:34 crc kubenswrapper[4961]: I1205 18:02:34.041122 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-db-create-7gphb"] Dec 05 18:02:34 crc kubenswrapper[4961]: I1205 18:02:34.052716 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-db-create-bmgv7"] Dec 05 18:02:34 crc kubenswrapper[4961]: I1205 18:02:34.065236 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-5809-account-create-update-5n4c9"] Dec 05 18:02:34 crc kubenswrapper[4961]: I1205 18:02:34.078148 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-api-db-create-8zqnt"] Dec 05 18:02:34 crc kubenswrapper[4961]: I1205 18:02:34.088958 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-5809-account-create-update-5n4c9"] Dec 05 18:02:34 crc kubenswrapper[4961]: I1205 18:02:34.098902 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-db-create-7gphb"] Dec 05 18:02:34 crc kubenswrapper[4961]: I1205 18:02:34.113160 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-api-db-create-8zqnt"] Dec 05 18:02:34 crc kubenswrapper[4961]: I1205 18:02:34.118850 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-db-create-bmgv7"] Dec 05 18:02:34 crc kubenswrapper[4961]: I1205 18:02:34.874607 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="276d90d2-7b34-45be-a492-89dc67929102" path="/var/lib/kubelet/pods/276d90d2-7b34-45be-a492-89dc67929102/volumes" Dec 05 18:02:34 crc kubenswrapper[4961]: I1205 18:02:34.875269 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="763814d3-5adb-4523-8baa-1ca7f7ecc86b" path="/var/lib/kubelet/pods/763814d3-5adb-4523-8baa-1ca7f7ecc86b/volumes" Dec 05 18:02:34 crc kubenswrapper[4961]: I1205 18:02:34.875865 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56" path="/var/lib/kubelet/pods/79dbb9be-a4f2-4ba9-b4ca-ca1550bc4e56/volumes" Dec 05 18:02:34 crc kubenswrapper[4961]: I1205 18:02:34.876411 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9ed81d6a-5172-4362-b7da-02f4552cb45b" path="/var/lib/kubelet/pods/9ed81d6a-5172-4362-b7da-02f4552cb45b/volumes" Dec 05 18:02:34 crc kubenswrapper[4961]: I1205 18:02:34.877438 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc" path="/var/lib/kubelet/pods/e2f8a87b-c73a-4b0f-a10c-5cd202a3aedc/volumes" Dec 05 18:02:34 crc 
kubenswrapper[4961]: I1205 18:02:34.877991 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f1728b96-e1ba-4e27-a120-6f6fd3e85437" path="/var/lib/kubelet/pods/f1728b96-e1ba-4e27-a120-6f6fd3e85437/volumes" Dec 05 18:02:43 crc kubenswrapper[4961]: I1205 18:02:43.584824 4961 scope.go:117] "RemoveContainer" containerID="0c159959b35f5fb1ae069ea0f318fda3d75cf71e46117fb47768e842a99f6a0a" Dec 05 18:02:43 crc kubenswrapper[4961]: I1205 18:02:43.619537 4961 scope.go:117] "RemoveContainer" containerID="8c1c0fe9d0c89482eee963cca2e897add7e5c0fc6031477e2f0cf7eeb00daab9" Dec 05 18:02:43 crc kubenswrapper[4961]: I1205 18:02:43.673302 4961 scope.go:117] "RemoveContainer" containerID="4b8f995fae745c0435941c2bcb6125cfe1428c628f7286e959a633d7e9ba1ff8" Dec 05 18:02:43 crc kubenswrapper[4961]: I1205 18:02:43.708654 4961 scope.go:117] "RemoveContainer" containerID="308dd2f22a6c6777855cc9d754902ac4a7ce3a59b8ac8c53e85361eca2c09457" Dec 05 18:02:43 crc kubenswrapper[4961]: I1205 18:02:43.748879 4961 scope.go:117] "RemoveContainer" containerID="35f6ce998ecf0ac3757a2bb7201aba75818978a7420ffd58d7d2f9c5e93ed279" Dec 05 18:02:43 crc kubenswrapper[4961]: I1205 18:02:43.806320 4961 scope.go:117] "RemoveContainer" containerID="b5d5b6efaaef42c2d6310136042a05e2583aedfa1b3777bee0ec9b8b3b1e74e7" Dec 05 18:02:43 crc kubenswrapper[4961]: I1205 18:02:43.854376 4961 scope.go:117] "RemoveContainer" containerID="a962669bb87dce720ca35fd1752e5a631befba4e2a10b94bb1a1b2a2cadef9f9" Dec 05 18:02:47 crc kubenswrapper[4961]: I1205 18:02:47.864204 4961 scope.go:117] "RemoveContainer" containerID="d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9" Dec 05 18:02:47 crc kubenswrapper[4961]: E1205 18:02:47.865055 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:02:58 crc kubenswrapper[4961]: I1205 18:02:58.864386 4961 scope.go:117] "RemoveContainer" containerID="d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9" Dec 05 18:02:58 crc kubenswrapper[4961]: E1205 18:02:58.865194 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:03:06 crc kubenswrapper[4961]: I1205 18:03:06.667987 4961 generic.go:334] "Generic (PLEG): container finished" podID="0e516b37-5d75-47c4-af9c-438a41abf158" containerID="0901a5833b9f815631ee2e6318dd23237a7ef02e3adf0e00f8886a38d040269a" exitCode=0 Dec 05 18:03:06 crc kubenswrapper[4961]: I1205 18:03:06.668110 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7lbk7" event={"ID":"0e516b37-5d75-47c4-af9c-438a41abf158","Type":"ContainerDied","Data":"0901a5833b9f815631ee2e6318dd23237a7ef02e3adf0e00f8886a38d040269a"} Dec 05 18:03:07 crc kubenswrapper[4961]: I1205 18:03:07.043175 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack/nova-cell0-conductor-db-sync-6nzdc"] Dec 05 18:03:07 crc kubenswrapper[4961]: I1205 18:03:07.056427 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-conductor-db-sync-6nzdc"] Dec 05 18:03:08 crc kubenswrapper[4961]: I1205 18:03:08.117281 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7lbk7" Dec 05 18:03:08 crc kubenswrapper[4961]: I1205 18:03:08.203117 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jtn99\" (UniqueName: \"kubernetes.io/projected/0e516b37-5d75-47c4-af9c-438a41abf158-kube-api-access-jtn99\") pod \"0e516b37-5d75-47c4-af9c-438a41abf158\" (UID: \"0e516b37-5d75-47c4-af9c-438a41abf158\") " Dec 05 18:03:08 crc kubenswrapper[4961]: I1205 18:03:08.203455 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0e516b37-5d75-47c4-af9c-438a41abf158-inventory\") pod \"0e516b37-5d75-47c4-af9c-438a41abf158\" (UID: \"0e516b37-5d75-47c4-af9c-438a41abf158\") " Dec 05 18:03:08 crc kubenswrapper[4961]: I1205 18:03:08.203549 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0e516b37-5d75-47c4-af9c-438a41abf158-ssh-key\") pod \"0e516b37-5d75-47c4-af9c-438a41abf158\" (UID: \"0e516b37-5d75-47c4-af9c-438a41abf158\") " Dec 05 18:03:08 crc kubenswrapper[4961]: I1205 18:03:08.209196 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0e516b37-5d75-47c4-af9c-438a41abf158-kube-api-access-jtn99" (OuterVolumeSpecName: "kube-api-access-jtn99") pod "0e516b37-5d75-47c4-af9c-438a41abf158" (UID: "0e516b37-5d75-47c4-af9c-438a41abf158"). InnerVolumeSpecName "kube-api-access-jtn99". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:03:08 crc kubenswrapper[4961]: I1205 18:03:08.235448 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e516b37-5d75-47c4-af9c-438a41abf158-inventory" (OuterVolumeSpecName: "inventory") pod "0e516b37-5d75-47c4-af9c-438a41abf158" (UID: "0e516b37-5d75-47c4-af9c-438a41abf158"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:03:08 crc kubenswrapper[4961]: I1205 18:03:08.247034 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0e516b37-5d75-47c4-af9c-438a41abf158-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "0e516b37-5d75-47c4-af9c-438a41abf158" (UID: "0e516b37-5d75-47c4-af9c-438a41abf158"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:03:08 crc kubenswrapper[4961]: I1205 18:03:08.306088 4961 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/0e516b37-5d75-47c4-af9c-438a41abf158-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 18:03:08 crc kubenswrapper[4961]: I1205 18:03:08.306122 4961 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/0e516b37-5d75-47c4-af9c-438a41abf158-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 18:03:08 crc kubenswrapper[4961]: I1205 18:03:08.306132 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jtn99\" (UniqueName: \"kubernetes.io/projected/0e516b37-5d75-47c4-af9c-438a41abf158-kube-api-access-jtn99\") on node \"crc\" DevicePath \"\"" Dec 05 18:03:08 crc kubenswrapper[4961]: I1205 18:03:08.684555 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7lbk7" event={"ID":"0e516b37-5d75-47c4-af9c-438a41abf158","Type":"ContainerDied","Data":"7add37fd772b9e51e664021a1a6f7334a7b10dbd940f01f8f228c354e7526f81"} Dec 05 18:03:08 crc kubenswrapper[4961]: I1205 18:03:08.684594 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7add37fd772b9e51e664021a1a6f7334a7b10dbd940f01f8f228c354e7526f81" Dec 05 18:03:08 crc kubenswrapper[4961]: I1205 18:03:08.684648 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-os-edpm-deployment-openstack-edpm-ipam-7lbk7" Dec 05 18:03:08 crc kubenswrapper[4961]: I1205 18:03:08.776960 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-592hz"] Dec 05 18:03:08 crc kubenswrapper[4961]: E1205 18:03:08.777582 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0e516b37-5d75-47c4-af9c-438a41abf158" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 18:03:08 crc kubenswrapper[4961]: I1205 18:03:08.777600 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="0e516b37-5d75-47c4-af9c-438a41abf158" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 18:03:08 crc kubenswrapper[4961]: I1205 18:03:08.777823 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="0e516b37-5d75-47c4-af9c-438a41abf158" containerName="install-os-edpm-deployment-openstack-edpm-ipam" Dec 05 18:03:08 crc kubenswrapper[4961]: I1205 18:03:08.778590 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-592hz" Dec 05 18:03:08 crc kubenswrapper[4961]: I1205 18:03:08.786749 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 18:03:08 crc kubenswrapper[4961]: I1205 18:03:08.786877 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rhbf4" Dec 05 18:03:08 crc kubenswrapper[4961]: I1205 18:03:08.786876 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 18:03:08 crc kubenswrapper[4961]: I1205 18:03:08.787402 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 18:03:08 crc kubenswrapper[4961]: I1205 18:03:08.795647 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-592hz"] Dec 05 18:03:08 crc kubenswrapper[4961]: I1205 18:03:08.874311 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="48ded27c-f907-4876-899b-236f2b1aa906" path="/var/lib/kubelet/pods/48ded27c-f907-4876-899b-236f2b1aa906/volumes" Dec 05 18:03:08 crc kubenswrapper[4961]: I1205 18:03:08.916362 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gpxw5\" (UniqueName: \"kubernetes.io/projected/877c43bb-852d-4f38-8322-8c72200ca936-kube-api-access-gpxw5\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-592hz\" (UID: \"877c43bb-852d-4f38-8322-8c72200ca936\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-592hz" Dec 05 18:03:08 crc kubenswrapper[4961]: I1205 18:03:08.916459 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/877c43bb-852d-4f38-8322-8c72200ca936-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-592hz\" (UID: \"877c43bb-852d-4f38-8322-8c72200ca936\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-592hz" Dec 05 18:03:08 crc kubenswrapper[4961]: I1205 18:03:08.916504 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/877c43bb-852d-4f38-8322-8c72200ca936-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-592hz\" (UID: \"877c43bb-852d-4f38-8322-8c72200ca936\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-592hz" Dec 05 18:03:09 crc kubenswrapper[4961]: I1205 18:03:09.019150 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gpxw5\" (UniqueName: \"kubernetes.io/projected/877c43bb-852d-4f38-8322-8c72200ca936-kube-api-access-gpxw5\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-592hz\" (UID: \"877c43bb-852d-4f38-8322-8c72200ca936\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-592hz" Dec 05 18:03:09 crc kubenswrapper[4961]: I1205 18:03:09.019307 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/877c43bb-852d-4f38-8322-8c72200ca936-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-592hz\" (UID: \"877c43bb-852d-4f38-8322-8c72200ca936\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-592hz" Dec 05 18:03:09 crc kubenswrapper[4961]: I1205 
18:03:09.019384 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/877c43bb-852d-4f38-8322-8c72200ca936-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-592hz\" (UID: \"877c43bb-852d-4f38-8322-8c72200ca936\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-592hz" Dec 05 18:03:09 crc kubenswrapper[4961]: I1205 18:03:09.024952 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/877c43bb-852d-4f38-8322-8c72200ca936-ssh-key\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-592hz\" (UID: \"877c43bb-852d-4f38-8322-8c72200ca936\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-592hz" Dec 05 18:03:09 crc kubenswrapper[4961]: I1205 18:03:09.041096 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gpxw5\" (UniqueName: \"kubernetes.io/projected/877c43bb-852d-4f38-8322-8c72200ca936-kube-api-access-gpxw5\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-592hz\" (UID: \"877c43bb-852d-4f38-8322-8c72200ca936\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-592hz" Dec 05 18:03:09 crc kubenswrapper[4961]: I1205 18:03:09.041230 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/877c43bb-852d-4f38-8322-8c72200ca936-inventory\") pod \"configure-os-edpm-deployment-openstack-edpm-ipam-592hz\" (UID: \"877c43bb-852d-4f38-8322-8c72200ca936\") " pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-592hz" Dec 05 18:03:09 crc kubenswrapper[4961]: I1205 18:03:09.100248 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-592hz" Dec 05 18:03:09 crc kubenswrapper[4961]: I1205 18:03:09.655906 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/configure-os-edpm-deployment-openstack-edpm-ipam-592hz"] Dec 05 18:03:09 crc kubenswrapper[4961]: W1205 18:03:09.658639 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod877c43bb_852d_4f38_8322_8c72200ca936.slice/crio-57fb8a40dcc93c61795955be00f7247c006c785ecd6d2f311151f44a67698a2e WatchSource:0}: Error finding container 57fb8a40dcc93c61795955be00f7247c006c785ecd6d2f311151f44a67698a2e: Status 404 returned error can't find the container with id 57fb8a40dcc93c61795955be00f7247c006c785ecd6d2f311151f44a67698a2e Dec 05 18:03:09 crc kubenswrapper[4961]: I1205 18:03:09.696074 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-592hz" event={"ID":"877c43bb-852d-4f38-8322-8c72200ca936","Type":"ContainerStarted","Data":"57fb8a40dcc93c61795955be00f7247c006c785ecd6d2f311151f44a67698a2e"} Dec 05 18:03:10 crc kubenswrapper[4961]: I1205 18:03:10.705250 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-592hz" event={"ID":"877c43bb-852d-4f38-8322-8c72200ca936","Type":"ContainerStarted","Data":"0f1402228661440e1e5b033fe0edfbb2b2678ca817637b73c8ead671b98c6800"} Dec 05 18:03:10 crc kubenswrapper[4961]: I1205 18:03:10.724285 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-592hz" podStartSLOduration=2.2942466120000002 
podStartE2EDuration="2.724266836s" podCreationTimestamp="2025-12-05 18:03:08 +0000 UTC" firstStartedPulling="2025-12-05 18:03:09.662340352 +0000 UTC m=+1795.723490825" lastFinishedPulling="2025-12-05 18:03:10.092360576 +0000 UTC m=+1796.153511049" observedRunningTime="2025-12-05 18:03:10.721887388 +0000 UTC m=+1796.783037891" watchObservedRunningTime="2025-12-05 18:03:10.724266836 +0000 UTC m=+1796.785417309" Dec 05 18:03:11 crc kubenswrapper[4961]: I1205 18:03:11.863648 4961 scope.go:117] "RemoveContainer" containerID="d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9" Dec 05 18:03:11 crc kubenswrapper[4961]: E1205 18:03:11.863989 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:03:26 crc kubenswrapper[4961]: I1205 18:03:26.865186 4961 scope.go:117] "RemoveContainer" containerID="d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9" Dec 05 18:03:26 crc kubenswrapper[4961]: E1205 18:03:26.866473 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:03:30 crc kubenswrapper[4961]: I1205 18:03:30.061024 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell0-cell-mapping-8k9zj"] Dec 05 18:03:30 crc kubenswrapper[4961]: I1205 18:03:30.079569 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell0-cell-mapping-8k9zj"] Dec 05 18:03:30 crc kubenswrapper[4961]: I1205 18:03:30.874996 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85bec0ac-6b6a-4532-a810-f468dded1d0e" path="/var/lib/kubelet/pods/85bec0ac-6b6a-4532-a810-f468dded1d0e/volumes" Dec 05 18:03:32 crc kubenswrapper[4961]: I1205 18:03:32.031301 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-dxcw5"] Dec 05 18:03:32 crc kubenswrapper[4961]: I1205 18:03:32.042071 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-conductor-db-sync-dxcw5"] Dec 05 18:03:32 crc kubenswrapper[4961]: I1205 18:03:32.873341 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="02132a97-8a35-4fc1-80ae-fb221ab2b8c6" path="/var/lib/kubelet/pods/02132a97-8a35-4fc1-80ae-fb221ab2b8c6/volumes" Dec 05 18:03:38 crc kubenswrapper[4961]: I1205 18:03:38.863609 4961 scope.go:117] "RemoveContainer" containerID="d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9" Dec 05 18:03:38 crc kubenswrapper[4961]: E1205 18:03:38.864269 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" 
podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:03:44 crc kubenswrapper[4961]: I1205 18:03:44.044278 4961 scope.go:117] "RemoveContainer" containerID="ad19af129ed4b2a2d920494446107a71aafadc14cc4915299ed33300a7ebcf0e" Dec 05 18:03:44 crc kubenswrapper[4961]: I1205 18:03:44.099290 4961 scope.go:117] "RemoveContainer" containerID="bfe9bba830ac76f72adc2e647d726c432709084db421691029eb237d2490dc53" Dec 05 18:03:44 crc kubenswrapper[4961]: I1205 18:03:44.154257 4961 scope.go:117] "RemoveContainer" containerID="352294156f014497b9fe3a11132400266f168e51a879bebf89d68908807fa945" Dec 05 18:03:53 crc kubenswrapper[4961]: I1205 18:03:53.864591 4961 scope.go:117] "RemoveContainer" containerID="d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9" Dec 05 18:03:53 crc kubenswrapper[4961]: E1205 18:03:53.865316 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:04:04 crc kubenswrapper[4961]: I1205 18:04:04.272313 4961 generic.go:334] "Generic (PLEG): container finished" podID="877c43bb-852d-4f38-8322-8c72200ca936" containerID="0f1402228661440e1e5b033fe0edfbb2b2678ca817637b73c8ead671b98c6800" exitCode=0 Dec 05 18:04:04 crc kubenswrapper[4961]: I1205 18:04:04.272349 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-592hz" event={"ID":"877c43bb-852d-4f38-8322-8c72200ca936","Type":"ContainerDied","Data":"0f1402228661440e1e5b033fe0edfbb2b2678ca817637b73c8ead671b98c6800"} Dec 05 18:04:05 crc kubenswrapper[4961]: I1205 18:04:05.680198 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-592hz" Dec 05 18:04:05 crc kubenswrapper[4961]: I1205 18:04:05.813356 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gpxw5\" (UniqueName: \"kubernetes.io/projected/877c43bb-852d-4f38-8322-8c72200ca936-kube-api-access-gpxw5\") pod \"877c43bb-852d-4f38-8322-8c72200ca936\" (UID: \"877c43bb-852d-4f38-8322-8c72200ca936\") " Dec 05 18:04:05 crc kubenswrapper[4961]: I1205 18:04:05.813420 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/877c43bb-852d-4f38-8322-8c72200ca936-inventory\") pod \"877c43bb-852d-4f38-8322-8c72200ca936\" (UID: \"877c43bb-852d-4f38-8322-8c72200ca936\") " Dec 05 18:04:05 crc kubenswrapper[4961]: I1205 18:04:05.813470 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/877c43bb-852d-4f38-8322-8c72200ca936-ssh-key\") pod \"877c43bb-852d-4f38-8322-8c72200ca936\" (UID: \"877c43bb-852d-4f38-8322-8c72200ca936\") " Dec 05 18:04:05 crc kubenswrapper[4961]: I1205 18:04:05.819364 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/877c43bb-852d-4f38-8322-8c72200ca936-kube-api-access-gpxw5" (OuterVolumeSpecName: "kube-api-access-gpxw5") pod "877c43bb-852d-4f38-8322-8c72200ca936" (UID: "877c43bb-852d-4f38-8322-8c72200ca936"). InnerVolumeSpecName "kube-api-access-gpxw5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:04:05 crc kubenswrapper[4961]: I1205 18:04:05.854834 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/877c43bb-852d-4f38-8322-8c72200ca936-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "877c43bb-852d-4f38-8322-8c72200ca936" (UID: "877c43bb-852d-4f38-8322-8c72200ca936"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:04:05 crc kubenswrapper[4961]: I1205 18:04:05.857481 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/877c43bb-852d-4f38-8322-8c72200ca936-inventory" (OuterVolumeSpecName: "inventory") pod "877c43bb-852d-4f38-8322-8c72200ca936" (UID: "877c43bb-852d-4f38-8322-8c72200ca936"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:04:05 crc kubenswrapper[4961]: I1205 18:04:05.916173 4961 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/877c43bb-852d-4f38-8322-8c72200ca936-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 18:04:05 crc kubenswrapper[4961]: I1205 18:04:05.916220 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gpxw5\" (UniqueName: \"kubernetes.io/projected/877c43bb-852d-4f38-8322-8c72200ca936-kube-api-access-gpxw5\") on node \"crc\" DevicePath \"\"" Dec 05 18:04:05 crc kubenswrapper[4961]: I1205 18:04:05.916232 4961 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/877c43bb-852d-4f38-8322-8c72200ca936-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 18:04:06 crc kubenswrapper[4961]: I1205 18:04:06.293431 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-592hz" event={"ID":"877c43bb-852d-4f38-8322-8c72200ca936","Type":"ContainerDied","Data":"57fb8a40dcc93c61795955be00f7247c006c785ecd6d2f311151f44a67698a2e"} Dec 05 18:04:06 crc kubenswrapper[4961]: I1205 18:04:06.293484 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="57fb8a40dcc93c61795955be00f7247c006c785ecd6d2f311151f44a67698a2e" Dec 05 18:04:06 crc kubenswrapper[4961]: I1205 18:04:06.293524 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/configure-os-edpm-deployment-openstack-edpm-ipam-592hz" Dec 05 18:04:06 crc kubenswrapper[4961]: I1205 18:04:06.419606 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-qgg4h"] Dec 05 18:04:06 crc kubenswrapper[4961]: E1205 18:04:06.420023 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="877c43bb-852d-4f38-8322-8c72200ca936" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 18:04:06 crc kubenswrapper[4961]: I1205 18:04:06.420042 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="877c43bb-852d-4f38-8322-8c72200ca936" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 18:04:06 crc kubenswrapper[4961]: I1205 18:04:06.420250 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="877c43bb-852d-4f38-8322-8c72200ca936" containerName="configure-os-edpm-deployment-openstack-edpm-ipam" Dec 05 18:04:06 crc kubenswrapper[4961]: I1205 18:04:06.420940 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-qgg4h" Dec 05 18:04:06 crc kubenswrapper[4961]: I1205 18:04:06.422748 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 18:04:06 crc kubenswrapper[4961]: I1205 18:04:06.422749 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 18:04:06 crc kubenswrapper[4961]: I1205 18:04:06.423838 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rhbf4" Dec 05 18:04:06 crc kubenswrapper[4961]: I1205 18:04:06.424165 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 18:04:06 crc kubenswrapper[4961]: I1205 18:04:06.435427 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-qgg4h"] Dec 05 18:04:06 crc kubenswrapper[4961]: I1205 18:04:06.528559 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-scrbc\" (UniqueName: \"kubernetes.io/projected/fdec8a7a-22cd-486f-81e5-9a8a7931b9bc-kube-api-access-scrbc\") pod \"ssh-known-hosts-edpm-deployment-qgg4h\" (UID: \"fdec8a7a-22cd-486f-81e5-9a8a7931b9bc\") " pod="openstack/ssh-known-hosts-edpm-deployment-qgg4h" Dec 05 18:04:06 crc kubenswrapper[4961]: I1205 18:04:06.528611 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/fdec8a7a-22cd-486f-81e5-9a8a7931b9bc-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-qgg4h\" (UID: \"fdec8a7a-22cd-486f-81e5-9a8a7931b9bc\") " pod="openstack/ssh-known-hosts-edpm-deployment-qgg4h" Dec 05 18:04:06 crc kubenswrapper[4961]: I1205 18:04:06.528654 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/fdec8a7a-22cd-486f-81e5-9a8a7931b9bc-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-qgg4h\" (UID: \"fdec8a7a-22cd-486f-81e5-9a8a7931b9bc\") " pod="openstack/ssh-known-hosts-edpm-deployment-qgg4h" Dec 05 18:04:06 crc kubenswrapper[4961]: I1205 18:04:06.630697 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-scrbc\" (UniqueName: \"kubernetes.io/projected/fdec8a7a-22cd-486f-81e5-9a8a7931b9bc-kube-api-access-scrbc\") pod \"ssh-known-hosts-edpm-deployment-qgg4h\" (UID: \"fdec8a7a-22cd-486f-81e5-9a8a7931b9bc\") " pod="openstack/ssh-known-hosts-edpm-deployment-qgg4h" Dec 05 18:04:06 crc kubenswrapper[4961]: I1205 18:04:06.630750 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/fdec8a7a-22cd-486f-81e5-9a8a7931b9bc-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-qgg4h\" (UID: \"fdec8a7a-22cd-486f-81e5-9a8a7931b9bc\") " pod="openstack/ssh-known-hosts-edpm-deployment-qgg4h" Dec 05 18:04:06 crc kubenswrapper[4961]: I1205 18:04:06.630807 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/fdec8a7a-22cd-486f-81e5-9a8a7931b9bc-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-qgg4h\" (UID: \"fdec8a7a-22cd-486f-81e5-9a8a7931b9bc\") " pod="openstack/ssh-known-hosts-edpm-deployment-qgg4h" Dec 05 18:04:06 crc 
kubenswrapper[4961]: I1205 18:04:06.636378 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/fdec8a7a-22cd-486f-81e5-9a8a7931b9bc-inventory-0\") pod \"ssh-known-hosts-edpm-deployment-qgg4h\" (UID: \"fdec8a7a-22cd-486f-81e5-9a8a7931b9bc\") " pod="openstack/ssh-known-hosts-edpm-deployment-qgg4h"
Dec 05 18:04:06 crc kubenswrapper[4961]: I1205 18:04:06.637194 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/fdec8a7a-22cd-486f-81e5-9a8a7931b9bc-ssh-key-openstack-edpm-ipam\") pod \"ssh-known-hosts-edpm-deployment-qgg4h\" (UID: \"fdec8a7a-22cd-486f-81e5-9a8a7931b9bc\") " pod="openstack/ssh-known-hosts-edpm-deployment-qgg4h"
Dec 05 18:04:06 crc kubenswrapper[4961]: I1205 18:04:06.649730 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-scrbc\" (UniqueName: \"kubernetes.io/projected/fdec8a7a-22cd-486f-81e5-9a8a7931b9bc-kube-api-access-scrbc\") pod \"ssh-known-hosts-edpm-deployment-qgg4h\" (UID: \"fdec8a7a-22cd-486f-81e5-9a8a7931b9bc\") " pod="openstack/ssh-known-hosts-edpm-deployment-qgg4h"
Dec 05 18:04:06 crc kubenswrapper[4961]: I1205 18:04:06.740322 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-qgg4h"
Dec 05 18:04:07 crc kubenswrapper[4961]: I1205 18:04:07.232661 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ssh-known-hosts-edpm-deployment-qgg4h"]
Dec 05 18:04:07 crc kubenswrapper[4961]: I1205 18:04:07.246410 4961 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 18:04:07 crc kubenswrapper[4961]: I1205 18:04:07.302284 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-qgg4h" event={"ID":"fdec8a7a-22cd-486f-81e5-9a8a7931b9bc","Type":"ContainerStarted","Data":"0f8db901e480cb838b84870ffb222a970731d972eba795b3cc79e45c0cee5197"}
Dec 05 18:04:08 crc kubenswrapper[4961]: I1205 18:04:08.314027 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-qgg4h" event={"ID":"fdec8a7a-22cd-486f-81e5-9a8a7931b9bc","Type":"ContainerStarted","Data":"494f274199098656d7bfc1fed0a931207b9eab926a745ce827b1a79c80553eb2"}
Dec 05 18:04:08 crc kubenswrapper[4961]: I1205 18:04:08.341213 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/ssh-known-hosts-edpm-deployment-qgg4h" podStartSLOduration=1.840484353 podStartE2EDuration="2.341188606s" podCreationTimestamp="2025-12-05 18:04:06 +0000 UTC" firstStartedPulling="2025-12-05 18:04:07.246137483 +0000 UTC m=+1853.307287966" lastFinishedPulling="2025-12-05 18:04:07.746841756 +0000 UTC m=+1853.807992219" observedRunningTime="2025-12-05 18:04:08.335146917 +0000 UTC m=+1854.396297390" watchObservedRunningTime="2025-12-05 18:04:08.341188606 +0000 UTC m=+1854.402339099"
Dec 05 18:04:08 crc kubenswrapper[4961]: I1205 18:04:08.863662 4961 scope.go:117] "RemoveContainer" containerID="d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9"
Dec 05 18:04:08 crc kubenswrapper[4961]: E1205 18:04:08.863933 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:04:15 crc kubenswrapper[4961]: I1205 18:04:15.049709 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack/nova-cell1-cell-mapping-rkrgs"]
Dec 05 18:04:15 crc kubenswrapper[4961]: I1205 18:04:15.059965 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack/nova-cell1-cell-mapping-rkrgs"]
Dec 05 18:04:15 crc kubenswrapper[4961]: I1205 18:04:15.375280 4961 generic.go:334] "Generic (PLEG): container finished" podID="fdec8a7a-22cd-486f-81e5-9a8a7931b9bc" containerID="494f274199098656d7bfc1fed0a931207b9eab926a745ce827b1a79c80553eb2" exitCode=0
Dec 05 18:04:15 crc kubenswrapper[4961]: I1205 18:04:15.375328 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-qgg4h" event={"ID":"fdec8a7a-22cd-486f-81e5-9a8a7931b9bc","Type":"ContainerDied","Data":"494f274199098656d7bfc1fed0a931207b9eab926a745ce827b1a79c80553eb2"}
Dec 05 18:04:16 crc kubenswrapper[4961]: I1205 18:04:16.859024 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-qgg4h"
Dec 05 18:04:16 crc kubenswrapper[4961]: I1205 18:04:16.882072 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5656c8dd-d36b-48ae-a272-01f789cf280d" path="/var/lib/kubelet/pods/5656c8dd-d36b-48ae-a272-01f789cf280d/volumes"
Dec 05 18:04:16 crc kubenswrapper[4961]: I1205 18:04:16.958335 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-scrbc\" (UniqueName: \"kubernetes.io/projected/fdec8a7a-22cd-486f-81e5-9a8a7931b9bc-kube-api-access-scrbc\") pod \"fdec8a7a-22cd-486f-81e5-9a8a7931b9bc\" (UID: \"fdec8a7a-22cd-486f-81e5-9a8a7931b9bc\") "
Dec 05 18:04:16 crc kubenswrapper[4961]: I1205 18:04:16.959073 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/fdec8a7a-22cd-486f-81e5-9a8a7931b9bc-ssh-key-openstack-edpm-ipam\") pod \"fdec8a7a-22cd-486f-81e5-9a8a7931b9bc\" (UID: \"fdec8a7a-22cd-486f-81e5-9a8a7931b9bc\") "
Dec 05 18:04:16 crc kubenswrapper[4961]: I1205 18:04:16.959404 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/fdec8a7a-22cd-486f-81e5-9a8a7931b9bc-inventory-0\") pod \"fdec8a7a-22cd-486f-81e5-9a8a7931b9bc\" (UID: \"fdec8a7a-22cd-486f-81e5-9a8a7931b9bc\") "
Dec 05 18:04:16 crc kubenswrapper[4961]: I1205 18:04:16.968159 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fdec8a7a-22cd-486f-81e5-9a8a7931b9bc-kube-api-access-scrbc" (OuterVolumeSpecName: "kube-api-access-scrbc") pod "fdec8a7a-22cd-486f-81e5-9a8a7931b9bc" (UID: "fdec8a7a-22cd-486f-81e5-9a8a7931b9bc"). InnerVolumeSpecName "kube-api-access-scrbc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 18:04:16 crc kubenswrapper[4961]: I1205 18:04:16.995259 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdec8a7a-22cd-486f-81e5-9a8a7931b9bc-inventory-0" (OuterVolumeSpecName: "inventory-0") pod "fdec8a7a-22cd-486f-81e5-9a8a7931b9bc" (UID: "fdec8a7a-22cd-486f-81e5-9a8a7931b9bc"). InnerVolumeSpecName "inventory-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:04:17 crc kubenswrapper[4961]: I1205 18:04:17.009688 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fdec8a7a-22cd-486f-81e5-9a8a7931b9bc-ssh-key-openstack-edpm-ipam" (OuterVolumeSpecName: "ssh-key-openstack-edpm-ipam") pod "fdec8a7a-22cd-486f-81e5-9a8a7931b9bc" (UID: "fdec8a7a-22cd-486f-81e5-9a8a7931b9bc"). InnerVolumeSpecName "ssh-key-openstack-edpm-ipam". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:04:17 crc kubenswrapper[4961]: I1205 18:04:17.062932 4961 reconciler_common.go:293] "Volume detached for volume \"inventory-0\" (UniqueName: \"kubernetes.io/secret/fdec8a7a-22cd-486f-81e5-9a8a7931b9bc-inventory-0\") on node \"crc\" DevicePath \"\""
Dec 05 18:04:17 crc kubenswrapper[4961]: I1205 18:04:17.062971 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-scrbc\" (UniqueName: \"kubernetes.io/projected/fdec8a7a-22cd-486f-81e5-9a8a7931b9bc-kube-api-access-scrbc\") on node \"crc\" DevicePath \"\""
Dec 05 18:04:17 crc kubenswrapper[4961]: I1205 18:04:17.062984 4961 reconciler_common.go:293] "Volume detached for volume \"ssh-key-openstack-edpm-ipam\" (UniqueName: \"kubernetes.io/secret/fdec8a7a-22cd-486f-81e5-9a8a7931b9bc-ssh-key-openstack-edpm-ipam\") on node \"crc\" DevicePath \"\""
Dec 05 18:04:17 crc kubenswrapper[4961]: I1205 18:04:17.393430 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ssh-known-hosts-edpm-deployment-qgg4h" event={"ID":"fdec8a7a-22cd-486f-81e5-9a8a7931b9bc","Type":"ContainerDied","Data":"0f8db901e480cb838b84870ffb222a970731d972eba795b3cc79e45c0cee5197"}
Dec 05 18:04:17 crc kubenswrapper[4961]: I1205 18:04:17.393470 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0f8db901e480cb838b84870ffb222a970731d972eba795b3cc79e45c0cee5197"
Dec 05 18:04:17 crc kubenswrapper[4961]: I1205 18:04:17.393564 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/ssh-known-hosts-edpm-deployment-qgg4h"
Dec 05 18:04:17 crc kubenswrapper[4961]: I1205 18:04:17.476059 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-mdm44"]
Dec 05 18:04:17 crc kubenswrapper[4961]: E1205 18:04:17.476688 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fdec8a7a-22cd-486f-81e5-9a8a7931b9bc" containerName="ssh-known-hosts-edpm-deployment"
Dec 05 18:04:17 crc kubenswrapper[4961]: I1205 18:04:17.476720 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="fdec8a7a-22cd-486f-81e5-9a8a7931b9bc" containerName="ssh-known-hosts-edpm-deployment"
Dec 05 18:04:17 crc kubenswrapper[4961]: I1205 18:04:17.477049 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="fdec8a7a-22cd-486f-81e5-9a8a7931b9bc" containerName="ssh-known-hosts-edpm-deployment"
Dec 05 18:04:17 crc kubenswrapper[4961]: I1205 18:04:17.478082 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mdm44"
Dec 05 18:04:17 crc kubenswrapper[4961]: I1205 18:04:17.480507 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 18:04:17 crc kubenswrapper[4961]: I1205 18:04:17.480802 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 18:04:17 crc kubenswrapper[4961]: I1205 18:04:17.481695 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rhbf4"
Dec 05 18:04:17 crc kubenswrapper[4961]: I1205 18:04:17.483430 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 18:04:17 crc kubenswrapper[4961]: I1205 18:04:17.489654 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-mdm44"]
Dec 05 18:04:17 crc kubenswrapper[4961]: I1205 18:04:17.576429 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/53528787-f94b-4255-bb5b-57b8c583eaaf-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-mdm44\" (UID: \"53528787-f94b-4255-bb5b-57b8c583eaaf\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mdm44"
Dec 05 18:04:17 crc kubenswrapper[4961]: I1205 18:04:17.576494 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/53528787-f94b-4255-bb5b-57b8c583eaaf-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-mdm44\" (UID: \"53528787-f94b-4255-bb5b-57b8c583eaaf\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mdm44"
Dec 05 18:04:17 crc kubenswrapper[4961]: I1205 18:04:17.576546 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2tkt8\" (UniqueName: \"kubernetes.io/projected/53528787-f94b-4255-bb5b-57b8c583eaaf-kube-api-access-2tkt8\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-mdm44\" (UID: \"53528787-f94b-4255-bb5b-57b8c583eaaf\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mdm44"
Dec 05 18:04:17 crc kubenswrapper[4961]: I1205 18:04:17.679064 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/53528787-f94b-4255-bb5b-57b8c583eaaf-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-mdm44\" (UID: \"53528787-f94b-4255-bb5b-57b8c583eaaf\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mdm44"
Dec 05 18:04:17 crc kubenswrapper[4961]: I1205 18:04:17.679164 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/53528787-f94b-4255-bb5b-57b8c583eaaf-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-mdm44\" (UID: \"53528787-f94b-4255-bb5b-57b8c583eaaf\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mdm44"
Dec 05 18:04:17 crc kubenswrapper[4961]: I1205 18:04:17.679227 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2tkt8\" (UniqueName: \"kubernetes.io/projected/53528787-f94b-4255-bb5b-57b8c583eaaf-kube-api-access-2tkt8\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-mdm44\" (UID: \"53528787-f94b-4255-bb5b-57b8c583eaaf\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mdm44"
Dec 05 18:04:17 crc kubenswrapper[4961]: I1205 18:04:17.683223 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/53528787-f94b-4255-bb5b-57b8c583eaaf-ssh-key\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-mdm44\" (UID: \"53528787-f94b-4255-bb5b-57b8c583eaaf\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mdm44"
Dec 05 18:04:17 crc kubenswrapper[4961]: I1205 18:04:17.683397 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/53528787-f94b-4255-bb5b-57b8c583eaaf-inventory\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-mdm44\" (UID: \"53528787-f94b-4255-bb5b-57b8c583eaaf\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mdm44"
Dec 05 18:04:17 crc kubenswrapper[4961]: I1205 18:04:17.707275 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2tkt8\" (UniqueName: \"kubernetes.io/projected/53528787-f94b-4255-bb5b-57b8c583eaaf-kube-api-access-2tkt8\") pod \"run-os-edpm-deployment-openstack-edpm-ipam-mdm44\" (UID: \"53528787-f94b-4255-bb5b-57b8c583eaaf\") " pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mdm44"
Dec 05 18:04:17 crc kubenswrapper[4961]: I1205 18:04:17.799841 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mdm44"
Dec 05 18:04:18 crc kubenswrapper[4961]: I1205 18:04:18.306959 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/run-os-edpm-deployment-openstack-edpm-ipam-mdm44"]
Dec 05 18:04:18 crc kubenswrapper[4961]: W1205 18:04:18.310379 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod53528787_f94b_4255_bb5b_57b8c583eaaf.slice/crio-3e02b2e3780a3149ecd01b97369910da3e889fd55d9fecc8be9434ce48b20c62 WatchSource:0}: Error finding container 3e02b2e3780a3149ecd01b97369910da3e889fd55d9fecc8be9434ce48b20c62: Status 404 returned error can't find the container with id 3e02b2e3780a3149ecd01b97369910da3e889fd55d9fecc8be9434ce48b20c62
Dec 05 18:04:18 crc kubenswrapper[4961]: I1205 18:04:18.406958 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mdm44" event={"ID":"53528787-f94b-4255-bb5b-57b8c583eaaf","Type":"ContainerStarted","Data":"3e02b2e3780a3149ecd01b97369910da3e889fd55d9fecc8be9434ce48b20c62"}
Dec 05 18:04:19 crc kubenswrapper[4961]: I1205 18:04:19.418664 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mdm44" event={"ID":"53528787-f94b-4255-bb5b-57b8c583eaaf","Type":"ContainerStarted","Data":"f2f147b4e73190193d34ace12279496744fa8cb1c3e297d7342fe32b9399bf47"}
Dec 05 18:04:19 crc kubenswrapper[4961]: I1205 18:04:19.437956 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mdm44" podStartSLOduration=2.000977699 podStartE2EDuration="2.437931654s" podCreationTimestamp="2025-12-05 18:04:17 +0000 UTC" firstStartedPulling="2025-12-05 18:04:18.313191915 +0000 UTC m=+1864.374342388" lastFinishedPulling="2025-12-05 18:04:18.75014588 +0000 UTC m=+1864.811296343" observedRunningTime="2025-12-05 18:04:19.43656675 +0000 UTC m=+1865.497717243" watchObservedRunningTime="2025-12-05 18:04:19.437931654 +0000 UTC m=+1865.499082147"
Dec 05 18:04:22 crc kubenswrapper[4961]: I1205 18:04:22.863995 4961 scope.go:117] "RemoveContainer" containerID="d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9"
Dec 05 18:04:22 crc kubenswrapper[4961]: E1205 18:04:22.865089 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:04:27 crc kubenswrapper[4961]: I1205 18:04:27.493704 4961 generic.go:334] "Generic (PLEG): container finished" podID="53528787-f94b-4255-bb5b-57b8c583eaaf" containerID="f2f147b4e73190193d34ace12279496744fa8cb1c3e297d7342fe32b9399bf47" exitCode=0
Dec 05 18:04:27 crc kubenswrapper[4961]: I1205 18:04:27.493831 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mdm44" event={"ID":"53528787-f94b-4255-bb5b-57b8c583eaaf","Type":"ContainerDied","Data":"f2f147b4e73190193d34ace12279496744fa8cb1c3e297d7342fe32b9399bf47"}
Dec 05 18:04:28 crc kubenswrapper[4961]: I1205 18:04:28.919909 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mdm44"
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.027653 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2tkt8\" (UniqueName: \"kubernetes.io/projected/53528787-f94b-4255-bb5b-57b8c583eaaf-kube-api-access-2tkt8\") pod \"53528787-f94b-4255-bb5b-57b8c583eaaf\" (UID: \"53528787-f94b-4255-bb5b-57b8c583eaaf\") "
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.028094 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/53528787-f94b-4255-bb5b-57b8c583eaaf-ssh-key\") pod \"53528787-f94b-4255-bb5b-57b8c583eaaf\" (UID: \"53528787-f94b-4255-bb5b-57b8c583eaaf\") "
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.028380 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/53528787-f94b-4255-bb5b-57b8c583eaaf-inventory\") pod \"53528787-f94b-4255-bb5b-57b8c583eaaf\" (UID: \"53528787-f94b-4255-bb5b-57b8c583eaaf\") "
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.039262 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/53528787-f94b-4255-bb5b-57b8c583eaaf-kube-api-access-2tkt8" (OuterVolumeSpecName: "kube-api-access-2tkt8") pod "53528787-f94b-4255-bb5b-57b8c583eaaf" (UID: "53528787-f94b-4255-bb5b-57b8c583eaaf"). InnerVolumeSpecName "kube-api-access-2tkt8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.059526 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53528787-f94b-4255-bb5b-57b8c583eaaf-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "53528787-f94b-4255-bb5b-57b8c583eaaf" (UID: "53528787-f94b-4255-bb5b-57b8c583eaaf"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.061012 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/53528787-f94b-4255-bb5b-57b8c583eaaf-inventory" (OuterVolumeSpecName: "inventory") pod "53528787-f94b-4255-bb5b-57b8c583eaaf" (UID: "53528787-f94b-4255-bb5b-57b8c583eaaf"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.130826 4961 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/53528787-f94b-4255-bb5b-57b8c583eaaf-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.130867 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2tkt8\" (UniqueName: \"kubernetes.io/projected/53528787-f94b-4255-bb5b-57b8c583eaaf-kube-api-access-2tkt8\") on node \"crc\" DevicePath \"\""
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.130884 4961 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/53528787-f94b-4255-bb5b-57b8c583eaaf-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.512083 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mdm44" event={"ID":"53528787-f94b-4255-bb5b-57b8c583eaaf","Type":"ContainerDied","Data":"3e02b2e3780a3149ecd01b97369910da3e889fd55d9fecc8be9434ce48b20c62"}
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.512426 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/run-os-edpm-deployment-openstack-edpm-ipam-mdm44"
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.512444 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e02b2e3780a3149ecd01b97369910da3e889fd55d9fecc8be9434ce48b20c62"
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.616978 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8"]
Dec 05 18:04:29 crc kubenswrapper[4961]: E1205 18:04:29.617435 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="53528787-f94b-4255-bb5b-57b8c583eaaf" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.617453 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="53528787-f94b-4255-bb5b-57b8c583eaaf" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.617677 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="53528787-f94b-4255-bb5b-57b8c583eaaf" containerName="run-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.618360 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8"
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.621732 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.621964 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rhbf4"
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.622272 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.623054 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.640669 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8"]
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.677188 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f2a419b8-744d-4932-a845-d9376364834b-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8\" (UID: \"f2a419b8-744d-4932-a845-d9376364834b\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8"
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.677719 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wp6k8\" (UniqueName: \"kubernetes.io/projected/f2a419b8-744d-4932-a845-d9376364834b-kube-api-access-wp6k8\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8\" (UID: \"f2a419b8-744d-4932-a845-d9376364834b\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8"
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.678084 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f2a419b8-744d-4932-a845-d9376364834b-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8\" (UID: \"f2a419b8-744d-4932-a845-d9376364834b\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8"
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.780150 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wp6k8\" (UniqueName: \"kubernetes.io/projected/f2a419b8-744d-4932-a845-d9376364834b-kube-api-access-wp6k8\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8\" (UID: \"f2a419b8-744d-4932-a845-d9376364834b\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8"
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.780258 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f2a419b8-744d-4932-a845-d9376364834b-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8\" (UID: \"f2a419b8-744d-4932-a845-d9376364834b\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8"
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.780355 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f2a419b8-744d-4932-a845-d9376364834b-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8\" (UID: \"f2a419b8-744d-4932-a845-d9376364834b\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8"
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.786977 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f2a419b8-744d-4932-a845-d9376364834b-inventory\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8\" (UID: \"f2a419b8-744d-4932-a845-d9376364834b\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8"
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.787925 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f2a419b8-744d-4932-a845-d9376364834b-ssh-key\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8\" (UID: \"f2a419b8-744d-4932-a845-d9376364834b\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8"
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.797304 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wp6k8\" (UniqueName: \"kubernetes.io/projected/f2a419b8-744d-4932-a845-d9376364834b-kube-api-access-wp6k8\") pod \"reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8\" (UID: \"f2a419b8-744d-4932-a845-d9376364834b\") " pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8"
Dec 05 18:04:29 crc kubenswrapper[4961]: I1205 18:04:29.938594 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8"
Dec 05 18:04:30 crc kubenswrapper[4961]: W1205 18:04:30.478484 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podf2a419b8_744d_4932_a845_d9376364834b.slice/crio-fc6570ade34c2a02b0bb4869e97427899862ba111a345a288d010355f0e8b8a9 WatchSource:0}: Error finding container fc6570ade34c2a02b0bb4869e97427899862ba111a345a288d010355f0e8b8a9: Status 404 returned error can't find the container with id fc6570ade34c2a02b0bb4869e97427899862ba111a345a288d010355f0e8b8a9
Dec 05 18:04:30 crc kubenswrapper[4961]: I1205 18:04:30.479969 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8"]
Dec 05 18:04:30 crc kubenswrapper[4961]: I1205 18:04:30.532912 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8" event={"ID":"f2a419b8-744d-4932-a845-d9376364834b","Type":"ContainerStarted","Data":"fc6570ade34c2a02b0bb4869e97427899862ba111a345a288d010355f0e8b8a9"}
Dec 05 18:04:31 crc kubenswrapper[4961]: I1205 18:04:31.544426 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8" event={"ID":"f2a419b8-744d-4932-a845-d9376364834b","Type":"ContainerStarted","Data":"91e0b8a51e7a724029235c55d661179dacaba95e93dc9696c249e42833d46d7f"}
Dec 05 18:04:31 crc kubenswrapper[4961]: I1205 18:04:31.568311 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8" podStartSLOduration=2.082469539 podStartE2EDuration="2.568290024s" podCreationTimestamp="2025-12-05 18:04:29 +0000 UTC" firstStartedPulling="2025-12-05 18:04:30.48531585 +0000 UTC m=+1876.546466333" lastFinishedPulling="2025-12-05 18:04:30.971136345 +0000 UTC m=+1877.032286818" observedRunningTime="2025-12-05 18:04:31.559594799 +0000 UTC m=+1877.620745292" watchObservedRunningTime="2025-12-05 18:04:31.568290024 +0000 UTC m=+1877.629440497"
Dec 05 18:04:36 crc kubenswrapper[4961]: I1205 18:04:36.864432 4961 scope.go:117] "RemoveContainer" containerID="d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9"
Dec 05 18:04:37 crc kubenswrapper[4961]: I1205 18:04:37.596573 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerStarted","Data":"a4dfbf02cada67058f2ed254a8aeb3929d2402207851cbed84f73238ae936112"}
Dec 05 18:04:40 crc kubenswrapper[4961]: I1205 18:04:40.625138 4961 generic.go:334] "Generic (PLEG): container finished" podID="f2a419b8-744d-4932-a845-d9376364834b" containerID="91e0b8a51e7a724029235c55d661179dacaba95e93dc9696c249e42833d46d7f" exitCode=0
Dec 05 18:04:40 crc kubenswrapper[4961]: I1205 18:04:40.625263 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8" event={"ID":"f2a419b8-744d-4932-a845-d9376364834b","Type":"ContainerDied","Data":"91e0b8a51e7a724029235c55d661179dacaba95e93dc9696c249e42833d46d7f"}
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.119899 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.231329 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f2a419b8-744d-4932-a845-d9376364834b-ssh-key\") pod \"f2a419b8-744d-4932-a845-d9376364834b\" (UID: \"f2a419b8-744d-4932-a845-d9376364834b\") "
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.231939 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wp6k8\" (UniqueName: \"kubernetes.io/projected/f2a419b8-744d-4932-a845-d9376364834b-kube-api-access-wp6k8\") pod \"f2a419b8-744d-4932-a845-d9376364834b\" (UID: \"f2a419b8-744d-4932-a845-d9376364834b\") "
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.232136 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f2a419b8-744d-4932-a845-d9376364834b-inventory\") pod \"f2a419b8-744d-4932-a845-d9376364834b\" (UID: \"f2a419b8-744d-4932-a845-d9376364834b\") "
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.236997 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f2a419b8-744d-4932-a845-d9376364834b-kube-api-access-wp6k8" (OuterVolumeSpecName: "kube-api-access-wp6k8") pod "f2a419b8-744d-4932-a845-d9376364834b" (UID: "f2a419b8-744d-4932-a845-d9376364834b"). InnerVolumeSpecName "kube-api-access-wp6k8". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.258413 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2a419b8-744d-4932-a845-d9376364834b-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "f2a419b8-744d-4932-a845-d9376364834b" (UID: "f2a419b8-744d-4932-a845-d9376364834b"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.260318 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f2a419b8-744d-4932-a845-d9376364834b-inventory" (OuterVolumeSpecName: "inventory") pod "f2a419b8-744d-4932-a845-d9376364834b" (UID: "f2a419b8-744d-4932-a845-d9376364834b"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.334509 4961 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/f2a419b8-744d-4932-a845-d9376364834b-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.334557 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wp6k8\" (UniqueName: \"kubernetes.io/projected/f2a419b8-744d-4932-a845-d9376364834b-kube-api-access-wp6k8\") on node \"crc\" DevicePath \"\""
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.334574 4961 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/f2a419b8-744d-4932-a845-d9376364834b-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.647425 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8" event={"ID":"f2a419b8-744d-4932-a845-d9376364834b","Type":"ContainerDied","Data":"fc6570ade34c2a02b0bb4869e97427899862ba111a345a288d010355f0e8b8a9"}
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.647462 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.647466 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fc6570ade34c2a02b0bb4869e97427899862ba111a345a288d010355f0e8b8a9"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.736291 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"]
Dec 05 18:04:42 crc kubenswrapper[4961]: E1205 18:04:42.736800 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f2a419b8-744d-4932-a845-d9376364834b" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.736825 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="f2a419b8-744d-4932-a845-d9376364834b" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.737071 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="f2a419b8-744d-4932-a845-d9376364834b" containerName="reboot-os-edpm-deployment-openstack-edpm-ipam"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.737800 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.739668 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.744102 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.744156 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-neutron-metadata-default-certs-0"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.744364 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-libvirt-default-certs-0"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.744549 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-telemetry-default-certs-0"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.744742 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.744888 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-ovn-default-certs-0"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.745010 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rhbf4"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.749577 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"]
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.842359 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.842419 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.842439 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.842533 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.842565 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.842761 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.842830 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.842868 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.842930 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.842984 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.843112 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.843182 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.843257 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjr74\" (UniqueName: \"kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-kube-api-access-qjr74\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.843296 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.945143 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjr74\" (UniqueName: \"kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-kube-api-access-qjr74\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.945192 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.945235 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.945267 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.945288 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.945308 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.945329 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.945370 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.945388 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.945411 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.945442 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.945472 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.945515 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.945545 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.950396 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-ovn-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.950801 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-openstack-edpm-ipam-ovn-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.950922 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-inventory\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.951179 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-repo-setup-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.951432 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.951579 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-telemetry-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.952154 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-nova-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.952168 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-bootstrap-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.952580 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.953532 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-libvirt-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.953688 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-ssh-key\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.957071 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.957386 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-neutron-metadata-combined-ca-bundle\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:42 crc kubenswrapper[4961]: I1205 18:04:42.967491 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjr74\" (UniqueName: \"kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-kube-api-access-qjr74\") pod \"install-certs-edpm-deployment-openstack-edpm-ipam-925jr\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") " pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:43 crc kubenswrapper[4961]: I1205 18:04:43.058580 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:04:43 crc kubenswrapper[4961]: I1205 18:04:43.576478 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"]
Dec 05 18:04:43 crc kubenswrapper[4961]: I1205 18:04:43.656561 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr" event={"ID":"b8b70414-5c28-428e-90be-4e5d82070919","Type":"ContainerStarted","Data":"5115a3ddaa1c3fec6f00653e92327b497549e9cc0ffda3c614b1441cf96de1ae"}
Dec 05 18:04:44 crc kubenswrapper[4961]: I1205 18:04:44.271461 4961 scope.go:117] "RemoveContainer" containerID="513dfaf8e21e3fdf52b17d1c804c70d66940f31d4d712bebaa85e068d0aa34ba"
Dec 05 18:04:44 crc kubenswrapper[4961]: I1205 18:04:44.665901 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr" event={"ID":"b8b70414-5c28-428e-90be-4e5d82070919","Type":"ContainerStarted","Data":"40977f004647532352ea99add12c49aeab7787de42cb51e772ea76d73e1f5296"}
Dec 05 18:04:44 crc kubenswrapper[4961]: I1205 18:04:44.692615 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr" podStartSLOduration=2.278759265 podStartE2EDuration="2.692597268s" podCreationTimestamp="2025-12-05 18:04:42 +0000 UTC" firstStartedPulling="2025-12-05 18:04:43.583324751 +0000 UTC m=+1889.644475224" lastFinishedPulling="2025-12-05 18:04:43.997162754 +0000 UTC m=+1890.058313227" observedRunningTime="2025-12-05 18:04:44.684072937 +0000 UTC m=+1890.745223410" watchObservedRunningTime="2025-12-05 18:04:44.692597268 +0000 UTC m=+1890.753747741"
Dec 05 18:05:24 crc kubenswrapper[4961]: I1205 18:05:24.025047 4961 generic.go:334] "Generic (PLEG): container finished" podID="b8b70414-5c28-428e-90be-4e5d82070919" containerID="40977f004647532352ea99add12c49aeab7787de42cb51e772ea76d73e1f5296" exitCode=0
Dec 05 18:05:24 crc kubenswrapper[4961]: I1205 18:05:24.025136 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr" event={"ID":"b8b70414-5c28-428e-90be-4e5d82070919","Type":"ContainerDied","Data":"40977f004647532352ea99add12c49aeab7787de42cb51e772ea76d73e1f5296"}
Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.478821 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr"
Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.628454 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-openstack-edpm-ipam-libvirt-default-certs-0\") pod \"b8b70414-5c28-428e-90be-4e5d82070919\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") "
Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.628544 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-bootstrap-combined-ca-bundle\") pod \"b8b70414-5c28-428e-90be-4e5d82070919\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") "
Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.628629 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-ovn-combined-ca-bundle\") pod \"b8b70414-5c28-428e-90be-4e5d82070919\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") "
Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.628654 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-openstack-edpm-ipam-ovn-default-certs-0\") pod \"b8b70414-5c28-428e-90be-4e5d82070919\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") "
Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.628684 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-neutron-metadata-combined-ca-bundle\") pod \"b8b70414-5c28-428e-90be-4e5d82070919\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") "
Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.628758 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-inventory\") pod \"b8b70414-5c28-428e-90be-4e5d82070919\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") "
Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.629332 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qjr74\" (UniqueName: \"kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-kube-api-access-qjr74\") pod \"b8b70414-5c28-428e-90be-4e5d82070919\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") "
Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.629385 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-openstack-edpm-ipam-neutron-metadata-default-certs-0\") pod \"b8b70414-5c28-428e-90be-4e5d82070919\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") "
Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.629427 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-openstack-edpm-ipam-telemetry-default-certs-0\") pod \"b8b70414-5c28-428e-90be-4e5d82070919\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") "
Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.629479 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-telemetry-combined-ca-bundle\") pod \"b8b70414-5c28-428e-90be-4e5d82070919\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") "
Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.629627 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-nova-combined-ca-bundle\") pod \"b8b70414-5c28-428e-90be-4e5d82070919\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") "
Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.629657 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-libvirt-combined-ca-bundle\") pod \"b8b70414-5c28-428e-90be-4e5d82070919\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") "
Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.629700 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-repo-setup-combined-ca-bundle\") pod \"b8b70414-5c28-428e-90be-4e5d82070919\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") "
Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.629725 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-ssh-key\") pod \"b8b70414-5c28-428e-90be-4e5d82070919\" (UID: \"b8b70414-5c28-428e-90be-4e5d82070919\") "
Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.670161 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-openstack-edpm-ipam-libvirt-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-libvirt-default-certs-0") pod "b8b70414-5c28-428e-90be-4e5d82070919" (UID: "b8b70414-5c28-428e-90be-4e5d82070919"). InnerVolumeSpecName "openstack-edpm-ipam-libvirt-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.671931 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "b8b70414-5c28-428e-90be-4e5d82070919" (UID: "b8b70414-5c28-428e-90be-4e5d82070919"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.672162 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-kube-api-access-qjr74" (OuterVolumeSpecName: "kube-api-access-qjr74") pod "b8b70414-5c28-428e-90be-4e5d82070919" (UID: "b8b70414-5c28-428e-90be-4e5d82070919"). InnerVolumeSpecName "kube-api-access-qjr74". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.672538 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-bootstrap-combined-ca-bundle" (OuterVolumeSpecName: "bootstrap-combined-ca-bundle") pod "b8b70414-5c28-428e-90be-4e5d82070919" (UID: "b8b70414-5c28-428e-90be-4e5d82070919"). InnerVolumeSpecName "bootstrap-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.674930 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "b8b70414-5c28-428e-90be-4e5d82070919" (UID: "b8b70414-5c28-428e-90be-4e5d82070919"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.675051 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-openstack-edpm-ipam-ovn-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-ovn-default-certs-0") pod "b8b70414-5c28-428e-90be-4e5d82070919" (UID: "b8b70414-5c28-428e-90be-4e5d82070919"). InnerVolumeSpecName "openstack-edpm-ipam-ovn-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.675885 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "b8b70414-5c28-428e-90be-4e5d82070919" (UID: "b8b70414-5c28-428e-90be-4e5d82070919"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.682312 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-openstack-edpm-ipam-neutron-metadata-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-neutron-metadata-default-certs-0") pod "b8b70414-5c28-428e-90be-4e5d82070919" (UID: "b8b70414-5c28-428e-90be-4e5d82070919"). InnerVolumeSpecName "openstack-edpm-ipam-neutron-metadata-default-certs-0". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.689812 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "b8b70414-5c28-428e-90be-4e5d82070919" (UID: "b8b70414-5c28-428e-90be-4e5d82070919"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.696388 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-openstack-edpm-ipam-telemetry-default-certs-0" (OuterVolumeSpecName: "openstack-edpm-ipam-telemetry-default-certs-0") pod "b8b70414-5c28-428e-90be-4e5d82070919" (UID: "b8b70414-5c28-428e-90be-4e5d82070919"). InnerVolumeSpecName "openstack-edpm-ipam-telemetry-default-certs-0".
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.697001 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "b8b70414-5c28-428e-90be-4e5d82070919" (UID: "b8b70414-5c28-428e-90be-4e5d82070919"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.697027 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-repo-setup-combined-ca-bundle" (OuterVolumeSpecName: "repo-setup-combined-ca-bundle") pod "b8b70414-5c28-428e-90be-4e5d82070919" (UID: "b8b70414-5c28-428e-90be-4e5d82070919"). InnerVolumeSpecName "repo-setup-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.700480 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-inventory" (OuterVolumeSpecName: "inventory") pod "b8b70414-5c28-428e-90be-4e5d82070919" (UID: "b8b70414-5c28-428e-90be-4e5d82070919"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.702617 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "b8b70414-5c28-428e-90be-4e5d82070919" (UID: "b8b70414-5c28-428e-90be-4e5d82070919"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.732844 4961 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.732874 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qjr74\" (UniqueName: \"kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-kube-api-access-qjr74\") on node \"crc\" DevicePath \"\"" Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.732887 4961 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-neutron-metadata-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-openstack-edpm-ipam-neutron-metadata-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.732897 4961 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-telemetry-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-openstack-edpm-ipam-telemetry-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.732907 4961 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.732921 4961 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.732929 4961 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.732938 4961 reconciler_common.go:293] "Volume detached for volume \"repo-setup-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-repo-setup-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.732946 4961 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.732955 4961 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-libvirt-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-openstack-edpm-ipam-libvirt-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.732964 4961 reconciler_common.go:293] "Volume detached for volume \"bootstrap-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-bootstrap-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.732973 4961 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-ovn-combined-ca-bundle\") on node 
\"crc\" DevicePath \"\"" Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.732981 4961 reconciler_common.go:293] "Volume detached for volume \"openstack-edpm-ipam-ovn-default-certs-0\" (UniqueName: \"kubernetes.io/projected/b8b70414-5c28-428e-90be-4e5d82070919-openstack-edpm-ipam-ovn-default-certs-0\") on node \"crc\" DevicePath \"\"" Dec 05 18:05:25 crc kubenswrapper[4961]: I1205 18:05:25.732990 4961 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/b8b70414-5c28-428e-90be-4e5d82070919-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.061978 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr" event={"ID":"b8b70414-5c28-428e-90be-4e5d82070919","Type":"ContainerDied","Data":"5115a3ddaa1c3fec6f00653e92327b497549e9cc0ffda3c614b1441cf96de1ae"} Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.062021 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5115a3ddaa1c3fec6f00653e92327b497549e9cc0ffda3c614b1441cf96de1ae" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.062055 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/install-certs-edpm-deployment-openstack-edpm-ipam-925jr" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.185094 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-nrkgn"] Dec 05 18:05:26 crc kubenswrapper[4961]: E1205 18:05:26.185504 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8b70414-5c28-428e-90be-4e5d82070919" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.185522 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8b70414-5c28-428e-90be-4e5d82070919" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.185734 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8b70414-5c28-428e-90be-4e5d82070919" containerName="install-certs-edpm-deployment-openstack-edpm-ipam" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.186450 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-nrkgn" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.188650 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.188789 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"ovncontroller-config" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.214021 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rhbf4" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.214028 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.214028 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.226670 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-nrkgn"] Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.342308 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-nrkgn\" (UID: \"2428c0ac-375e-4515-9dfd-39f68c6e6ed1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-nrkgn" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.342566 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-nrkgn\" (UID: \"2428c0ac-375e-4515-9dfd-39f68c6e6ed1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-nrkgn" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.342753 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-nrkgn\" (UID: \"2428c0ac-375e-4515-9dfd-39f68c6e6ed1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-nrkgn" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.342881 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-nrkgn\" (UID: \"2428c0ac-375e-4515-9dfd-39f68c6e6ed1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-nrkgn" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.342970 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nr8kw\" (UniqueName: \"kubernetes.io/projected/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-kube-api-access-nr8kw\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-nrkgn\" (UID: \"2428c0ac-375e-4515-9dfd-39f68c6e6ed1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-nrkgn" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.444723 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: 
\"kubernetes.io/secret/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-nrkgn\" (UID: \"2428c0ac-375e-4515-9dfd-39f68c6e6ed1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-nrkgn" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.444813 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-nrkgn\" (UID: \"2428c0ac-375e-4515-9dfd-39f68c6e6ed1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-nrkgn" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.444887 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-nrkgn\" (UID: \"2428c0ac-375e-4515-9dfd-39f68c6e6ed1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-nrkgn" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.444928 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-nrkgn\" (UID: \"2428c0ac-375e-4515-9dfd-39f68c6e6ed1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-nrkgn" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.444971 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nr8kw\" (UniqueName: \"kubernetes.io/projected/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-kube-api-access-nr8kw\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-nrkgn\" (UID: \"2428c0ac-375e-4515-9dfd-39f68c6e6ed1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-nrkgn" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.449372 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-ovncontroller-config-0\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-nrkgn\" (UID: \"2428c0ac-375e-4515-9dfd-39f68c6e6ed1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-nrkgn" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.465258 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-ssh-key\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-nrkgn\" (UID: \"2428c0ac-375e-4515-9dfd-39f68c6e6ed1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-nrkgn" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.470395 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nr8kw\" (UniqueName: \"kubernetes.io/projected/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-kube-api-access-nr8kw\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-nrkgn\" (UID: \"2428c0ac-375e-4515-9dfd-39f68c6e6ed1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-nrkgn" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.471843 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-inventory\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-nrkgn\" (UID: \"2428c0ac-375e-4515-9dfd-39f68c6e6ed1\") " 
pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-nrkgn" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.476365 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-ovn-combined-ca-bundle\") pod \"ovn-edpm-deployment-openstack-edpm-ipam-nrkgn\" (UID: \"2428c0ac-375e-4515-9dfd-39f68c6e6ed1\") " pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-nrkgn" Dec 05 18:05:26 crc kubenswrapper[4961]: I1205 18:05:26.527361 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-nrkgn" Dec 05 18:05:27 crc kubenswrapper[4961]: I1205 18:05:27.076417 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/ovn-edpm-deployment-openstack-edpm-ipam-nrkgn"] Dec 05 18:05:28 crc kubenswrapper[4961]: I1205 18:05:28.087172 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-nrkgn" event={"ID":"2428c0ac-375e-4515-9dfd-39f68c6e6ed1","Type":"ContainerStarted","Data":"f97bfbc77a23f9542b3701182213d928c410832e4a1ea37cbfb97bab0329d44f"} Dec 05 18:05:28 crc kubenswrapper[4961]: I1205 18:05:28.087426 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-nrkgn" event={"ID":"2428c0ac-375e-4515-9dfd-39f68c6e6ed1","Type":"ContainerStarted","Data":"34cdf0d6e69b6d1ceb2dbb3501075dcc7c2db30a27b2a739a7fa9d105b1742e2"} Dec 05 18:06:36 crc kubenswrapper[4961]: I1205 18:06:36.971827 4961 generic.go:334] "Generic (PLEG): container finished" podID="2428c0ac-375e-4515-9dfd-39f68c6e6ed1" containerID="f97bfbc77a23f9542b3701182213d928c410832e4a1ea37cbfb97bab0329d44f" exitCode=0 Dec 05 18:06:36 crc kubenswrapper[4961]: I1205 18:06:36.971914 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-nrkgn" event={"ID":"2428c0ac-375e-4515-9dfd-39f68c6e6ed1","Type":"ContainerDied","Data":"f97bfbc77a23f9542b3701182213d928c410832e4a1ea37cbfb97bab0329d44f"} Dec 05 18:06:38 crc kubenswrapper[4961]: I1205 18:06:38.374289 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-nrkgn" Dec 05 18:06:38 crc kubenswrapper[4961]: I1205 18:06:38.556145 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-inventory\") pod \"2428c0ac-375e-4515-9dfd-39f68c6e6ed1\" (UID: \"2428c0ac-375e-4515-9dfd-39f68c6e6ed1\") " Dec 05 18:06:38 crc kubenswrapper[4961]: I1205 18:06:38.556739 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nr8kw\" (UniqueName: \"kubernetes.io/projected/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-kube-api-access-nr8kw\") pod \"2428c0ac-375e-4515-9dfd-39f68c6e6ed1\" (UID: \"2428c0ac-375e-4515-9dfd-39f68c6e6ed1\") " Dec 05 18:06:38 crc kubenswrapper[4961]: I1205 18:06:38.557004 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-ovncontroller-config-0\") pod \"2428c0ac-375e-4515-9dfd-39f68c6e6ed1\" (UID: \"2428c0ac-375e-4515-9dfd-39f68c6e6ed1\") " Dec 05 18:06:38 crc kubenswrapper[4961]: I1205 18:06:38.557241 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-ssh-key\") pod \"2428c0ac-375e-4515-9dfd-39f68c6e6ed1\" (UID: \"2428c0ac-375e-4515-9dfd-39f68c6e6ed1\") " Dec 05 18:06:38 crc kubenswrapper[4961]: I1205 18:06:38.557459 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-ovn-combined-ca-bundle\") pod \"2428c0ac-375e-4515-9dfd-39f68c6e6ed1\" (UID: \"2428c0ac-375e-4515-9dfd-39f68c6e6ed1\") " Dec 05 18:06:38 crc kubenswrapper[4961]: I1205 18:06:38.564432 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-ovn-combined-ca-bundle" (OuterVolumeSpecName: "ovn-combined-ca-bundle") pod "2428c0ac-375e-4515-9dfd-39f68c6e6ed1" (UID: "2428c0ac-375e-4515-9dfd-39f68c6e6ed1"). InnerVolumeSpecName "ovn-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:06:38 crc kubenswrapper[4961]: I1205 18:06:38.567176 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-kube-api-access-nr8kw" (OuterVolumeSpecName: "kube-api-access-nr8kw") pod "2428c0ac-375e-4515-9dfd-39f68c6e6ed1" (UID: "2428c0ac-375e-4515-9dfd-39f68c6e6ed1"). InnerVolumeSpecName "kube-api-access-nr8kw". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:06:38 crc kubenswrapper[4961]: I1205 18:06:38.585200 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-ovncontroller-config-0" (OuterVolumeSpecName: "ovncontroller-config-0") pod "2428c0ac-375e-4515-9dfd-39f68c6e6ed1" (UID: "2428c0ac-375e-4515-9dfd-39f68c6e6ed1"). InnerVolumeSpecName "ovncontroller-config-0". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 18:06:38 crc kubenswrapper[4961]: I1205 18:06:38.587931 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "2428c0ac-375e-4515-9dfd-39f68c6e6ed1" (UID: "2428c0ac-375e-4515-9dfd-39f68c6e6ed1"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:06:38 crc kubenswrapper[4961]: I1205 18:06:38.599133 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-inventory" (OuterVolumeSpecName: "inventory") pod "2428c0ac-375e-4515-9dfd-39f68c6e6ed1" (UID: "2428c0ac-375e-4515-9dfd-39f68c6e6ed1"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:06:38 crc kubenswrapper[4961]: I1205 18:06:38.661839 4961 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 18:06:38 crc kubenswrapper[4961]: I1205 18:06:38.661909 4961 reconciler_common.go:293] "Volume detached for volume \"ovn-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-ovn-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 18:06:38 crc kubenswrapper[4961]: I1205 18:06:38.661933 4961 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 18:06:38 crc kubenswrapper[4961]: I1205 18:06:38.661953 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nr8kw\" (UniqueName: \"kubernetes.io/projected/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-kube-api-access-nr8kw\") on node \"crc\" DevicePath \"\"" Dec 05 18:06:38 crc kubenswrapper[4961]: I1205 18:06:38.661973 4961 reconciler_common.go:293] "Volume detached for volume \"ovncontroller-config-0\" (UniqueName: \"kubernetes.io/configmap/2428c0ac-375e-4515-9dfd-39f68c6e6ed1-ovncontroller-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 18:06:38 crc kubenswrapper[4961]: I1205 18:06:38.995102 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-nrkgn" event={"ID":"2428c0ac-375e-4515-9dfd-39f68c6e6ed1","Type":"ContainerDied","Data":"34cdf0d6e69b6d1ceb2dbb3501075dcc7c2db30a27b2a739a7fa9d105b1742e2"} Dec 05 18:06:38 crc kubenswrapper[4961]: I1205 18:06:38.995152 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="34cdf0d6e69b6d1ceb2dbb3501075dcc7c2db30a27b2a739a7fa9d105b1742e2" Dec 05 18:06:38 crc kubenswrapper[4961]: I1205 18:06:38.995178 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/ovn-edpm-deployment-openstack-edpm-ipam-nrkgn" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.100820 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk"] Dec 05 18:06:39 crc kubenswrapper[4961]: E1205 18:06:39.101457 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2428c0ac-375e-4515-9dfd-39f68c6e6ed1" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.101503 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="2428c0ac-375e-4515-9dfd-39f68c6e6ed1" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.101893 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="2428c0ac-375e-4515-9dfd-39f68c6e6ed1" containerName="ovn-edpm-deployment-openstack-edpm-ipam" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.102814 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.105284 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.105400 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"neutron-ovn-metadata-agent-neutron-config" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.105438 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.105821 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.105884 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rhbf4" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.109739 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-metadata-neutron-config" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.116912 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk"] Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.273556 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m79vb\" (UniqueName: \"kubernetes.io/projected/baa4c345-2f59-42ac-a33e-c350c642a73c-kube-api-access-m79vb\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk\" (UID: \"baa4c345-2f59-42ac-a33e-c350c642a73c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.273606 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk\" (UID: \"baa4c345-2f59-42ac-a33e-c350c642a73c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.273666 4961 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk\" (UID: \"baa4c345-2f59-42ac-a33e-c350c642a73c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.273750 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk\" (UID: \"baa4c345-2f59-42ac-a33e-c350c642a73c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.273976 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk\" (UID: \"baa4c345-2f59-42ac-a33e-c350c642a73c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.274051 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk\" (UID: \"baa4c345-2f59-42ac-a33e-c350c642a73c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.376700 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk\" (UID: \"baa4c345-2f59-42ac-a33e-c350c642a73c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.376859 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk\" (UID: \"baa4c345-2f59-42ac-a33e-c350c642a73c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.376947 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m79vb\" (UniqueName: \"kubernetes.io/projected/baa4c345-2f59-42ac-a33e-c350c642a73c-kube-api-access-m79vb\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk\" (UID: \"baa4c345-2f59-42ac-a33e-c350c642a73c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.376984 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-nova-metadata-neutron-config-0\") pod 
\"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk\" (UID: \"baa4c345-2f59-42ac-a33e-c350c642a73c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.377070 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk\" (UID: \"baa4c345-2f59-42ac-a33e-c350c642a73c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.377169 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk\" (UID: \"baa4c345-2f59-42ac-a33e-c350c642a73c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.381948 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-ssh-key\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk\" (UID: \"baa4c345-2f59-42ac-a33e-c350c642a73c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.382078 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-neutron-metadata-combined-ca-bundle\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk\" (UID: \"baa4c345-2f59-42ac-a33e-c350c642a73c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.382144 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-neutron-ovn-metadata-agent-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk\" (UID: \"baa4c345-2f59-42ac-a33e-c350c642a73c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.383029 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-nova-metadata-neutron-config-0\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk\" (UID: \"baa4c345-2f59-42ac-a33e-c350c642a73c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.383841 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-inventory\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk\" (UID: \"baa4c345-2f59-42ac-a33e-c350c642a73c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.403144 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-m79vb\" (UniqueName: \"kubernetes.io/projected/baa4c345-2f59-42ac-a33e-c350c642a73c-kube-api-access-m79vb\") pod \"neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk\" (UID: \"baa4c345-2f59-42ac-a33e-c350c642a73c\") " pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" Dec 05 18:06:39 crc kubenswrapper[4961]: I1205 18:06:39.429290 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" Dec 05 18:06:40 crc kubenswrapper[4961]: I1205 18:06:40.030227 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk"] Dec 05 18:06:41 crc kubenswrapper[4961]: I1205 18:06:41.012579 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" event={"ID":"baa4c345-2f59-42ac-a33e-c350c642a73c","Type":"ContainerStarted","Data":"6f3d1fa69f8faa3bbb5636b8a9b5396751fd8251a2afd9f513a8d543297f99b6"} Dec 05 18:06:41 crc kubenswrapper[4961]: I1205 18:06:41.013033 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" event={"ID":"baa4c345-2f59-42ac-a33e-c350c642a73c","Type":"ContainerStarted","Data":"dbd59015d6956292a8ef7c1e8b0b38c44edc31d9bf6216337d4d1761624477dd"} Dec 05 18:06:41 crc kubenswrapper[4961]: I1205 18:06:41.049302 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" podStartSLOduration=1.568006688 podStartE2EDuration="2.049274639s" podCreationTimestamp="2025-12-05 18:06:39 +0000 UTC" firstStartedPulling="2025-12-05 18:06:40.034104148 +0000 UTC m=+2006.095254641" lastFinishedPulling="2025-12-05 18:06:40.515372089 +0000 UTC m=+2006.576522592" observedRunningTime="2025-12-05 18:06:41.03636839 +0000 UTC m=+2007.097518883" watchObservedRunningTime="2025-12-05 18:06:41.049274639 +0000 UTC m=+2007.110425122" Dec 05 18:06:57 crc kubenswrapper[4961]: I1205 18:06:57.245765 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 18:06:57 crc kubenswrapper[4961]: I1205 18:06:57.246553 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 18:07:27 crc kubenswrapper[4961]: I1205 18:07:27.245497 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 18:07:27 crc kubenswrapper[4961]: I1205 18:07:27.246100 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" Dec 05 18:07:33 crc kubenswrapper[4961]: I1205 18:07:33.572103 4961 generic.go:334] "Generic (PLEG): container finished" podID="baa4c345-2f59-42ac-a33e-c350c642a73c" containerID="6f3d1fa69f8faa3bbb5636b8a9b5396751fd8251a2afd9f513a8d543297f99b6" exitCode=0 Dec 05 18:07:33 crc kubenswrapper[4961]: I1205 18:07:33.572237 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" event={"ID":"baa4c345-2f59-42ac-a33e-c350c642a73c","Type":"ContainerDied","Data":"6f3d1fa69f8faa3bbb5636b8a9b5396751fd8251a2afd9f513a8d543297f99b6"} Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.071508 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.217631 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m79vb\" (UniqueName: \"kubernetes.io/projected/baa4c345-2f59-42ac-a33e-c350c642a73c-kube-api-access-m79vb\") pod \"baa4c345-2f59-42ac-a33e-c350c642a73c\" (UID: \"baa4c345-2f59-42ac-a33e-c350c642a73c\") " Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.217703 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-neutron-ovn-metadata-agent-neutron-config-0\") pod \"baa4c345-2f59-42ac-a33e-c350c642a73c\" (UID: \"baa4c345-2f59-42ac-a33e-c350c642a73c\") " Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.217859 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-nova-metadata-neutron-config-0\") pod \"baa4c345-2f59-42ac-a33e-c350c642a73c\" (UID: \"baa4c345-2f59-42ac-a33e-c350c642a73c\") " Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.217897 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-ssh-key\") pod \"baa4c345-2f59-42ac-a33e-c350c642a73c\" (UID: \"baa4c345-2f59-42ac-a33e-c350c642a73c\") " Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.217983 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-neutron-metadata-combined-ca-bundle\") pod \"baa4c345-2f59-42ac-a33e-c350c642a73c\" (UID: \"baa4c345-2f59-42ac-a33e-c350c642a73c\") " Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.218042 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-inventory\") pod \"baa4c345-2f59-42ac-a33e-c350c642a73c\" (UID: \"baa4c345-2f59-42ac-a33e-c350c642a73c\") " Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.224361 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/baa4c345-2f59-42ac-a33e-c350c642a73c-kube-api-access-m79vb" (OuterVolumeSpecName: "kube-api-access-m79vb") pod "baa4c345-2f59-42ac-a33e-c350c642a73c" (UID: "baa4c345-2f59-42ac-a33e-c350c642a73c"). InnerVolumeSpecName "kube-api-access-m79vb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.225197 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-neutron-metadata-combined-ca-bundle" (OuterVolumeSpecName: "neutron-metadata-combined-ca-bundle") pod "baa4c345-2f59-42ac-a33e-c350c642a73c" (UID: "baa4c345-2f59-42ac-a33e-c350c642a73c"). InnerVolumeSpecName "neutron-metadata-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.251178 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-nova-metadata-neutron-config-0" (OuterVolumeSpecName: "nova-metadata-neutron-config-0") pod "baa4c345-2f59-42ac-a33e-c350c642a73c" (UID: "baa4c345-2f59-42ac-a33e-c350c642a73c"). InnerVolumeSpecName "nova-metadata-neutron-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.252089 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-inventory" (OuterVolumeSpecName: "inventory") pod "baa4c345-2f59-42ac-a33e-c350c642a73c" (UID: "baa4c345-2f59-42ac-a33e-c350c642a73c"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.253340 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "baa4c345-2f59-42ac-a33e-c350c642a73c" (UID: "baa4c345-2f59-42ac-a33e-c350c642a73c"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.271118 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-neutron-ovn-metadata-agent-neutron-config-0" (OuterVolumeSpecName: "neutron-ovn-metadata-agent-neutron-config-0") pod "baa4c345-2f59-42ac-a33e-c350c642a73c" (UID: "baa4c345-2f59-42ac-a33e-c350c642a73c"). InnerVolumeSpecName "neutron-ovn-metadata-agent-neutron-config-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.320993 4961 reconciler_common.go:293] "Volume detached for volume \"nova-metadata-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-nova-metadata-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.321025 4961 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.321035 4961 reconciler_common.go:293] "Volume detached for volume \"neutron-metadata-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-neutron-metadata-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.321051 4961 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.321088 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m79vb\" (UniqueName: \"kubernetes.io/projected/baa4c345-2f59-42ac-a33e-c350c642a73c-kube-api-access-m79vb\") on node \"crc\" DevicePath \"\"" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.321098 4961 reconciler_common.go:293] "Volume detached for volume \"neutron-ovn-metadata-agent-neutron-config-0\" (UniqueName: \"kubernetes.io/secret/baa4c345-2f59-42ac-a33e-c350c642a73c-neutron-ovn-metadata-agent-neutron-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.599824 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" event={"ID":"baa4c345-2f59-42ac-a33e-c350c642a73c","Type":"ContainerDied","Data":"dbd59015d6956292a8ef7c1e8b0b38c44edc31d9bf6216337d4d1761624477dd"} Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.599863 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dbd59015d6956292a8ef7c1e8b0b38c44edc31d9bf6216337d4d1761624477dd" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.599926 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.740244 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb"] Dec 05 18:07:35 crc kubenswrapper[4961]: E1205 18:07:35.740703 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="baa4c345-2f59-42ac-a33e-c350c642a73c" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.740722 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="baa4c345-2f59-42ac-a33e-c350c642a73c" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.740930 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="baa4c345-2f59-42ac-a33e-c350c642a73c" containerName="neutron-metadata-edpm-deployment-openstack-edpm-ipam" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.741577 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.743806 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.744245 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.744426 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rhbf4" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.746140 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.747302 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"libvirt-secret" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.753277 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb"] Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.833103 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/350a4c11-1d87-4f63-8ec8-c808de6e46b0-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb\" (UID: \"350a4c11-1d87-4f63-8ec8-c808de6e46b0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.833201 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/350a4c11-1d87-4f63-8ec8-c808de6e46b0-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb\" (UID: \"350a4c11-1d87-4f63-8ec8-c808de6e46b0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.833236 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/350a4c11-1d87-4f63-8ec8-c808de6e46b0-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb\" (UID: \"350a4c11-1d87-4f63-8ec8-c808de6e46b0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.833269 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/350a4c11-1d87-4f63-8ec8-c808de6e46b0-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb\" (UID: \"350a4c11-1d87-4f63-8ec8-c808de6e46b0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.833486 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jfrzx\" (UniqueName: \"kubernetes.io/projected/350a4c11-1d87-4f63-8ec8-c808de6e46b0-kube-api-access-jfrzx\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb\" (UID: \"350a4c11-1d87-4f63-8ec8-c808de6e46b0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.935417 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/350a4c11-1d87-4f63-8ec8-c808de6e46b0-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb\" (UID: \"350a4c11-1d87-4f63-8ec8-c808de6e46b0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.935508 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/350a4c11-1d87-4f63-8ec8-c808de6e46b0-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb\" (UID: \"350a4c11-1d87-4f63-8ec8-c808de6e46b0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.935538 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/350a4c11-1d87-4f63-8ec8-c808de6e46b0-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb\" (UID: \"350a4c11-1d87-4f63-8ec8-c808de6e46b0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.935569 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/350a4c11-1d87-4f63-8ec8-c808de6e46b0-libvirt-combined-ca-bundle\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb\" (UID: \"350a4c11-1d87-4f63-8ec8-c808de6e46b0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.935613 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jfrzx\" (UniqueName: \"kubernetes.io/projected/350a4c11-1d87-4f63-8ec8-c808de6e46b0-kube-api-access-jfrzx\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb\" (UID: \"350a4c11-1d87-4f63-8ec8-c808de6e46b0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.939764 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/350a4c11-1d87-4f63-8ec8-c808de6e46b0-libvirt-secret-0\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb\" (UID: \"350a4c11-1d87-4f63-8ec8-c808de6e46b0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.940123 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/350a4c11-1d87-4f63-8ec8-c808de6e46b0-ssh-key\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb\" (UID: \"350a4c11-1d87-4f63-8ec8-c808de6e46b0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.940754 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/350a4c11-1d87-4f63-8ec8-c808de6e46b0-inventory\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb\" (UID: \"350a4c11-1d87-4f63-8ec8-c808de6e46b0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.942097 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/350a4c11-1d87-4f63-8ec8-c808de6e46b0-libvirt-combined-ca-bundle\") pod 
\"libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb\" (UID: \"350a4c11-1d87-4f63-8ec8-c808de6e46b0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb" Dec 05 18:07:35 crc kubenswrapper[4961]: I1205 18:07:35.961426 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jfrzx\" (UniqueName: \"kubernetes.io/projected/350a4c11-1d87-4f63-8ec8-c808de6e46b0-kube-api-access-jfrzx\") pod \"libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb\" (UID: \"350a4c11-1d87-4f63-8ec8-c808de6e46b0\") " pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb" Dec 05 18:07:36 crc kubenswrapper[4961]: I1205 18:07:36.106658 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb" Dec 05 18:07:36 crc kubenswrapper[4961]: I1205 18:07:36.679285 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb"] Dec 05 18:07:37 crc kubenswrapper[4961]: I1205 18:07:37.624253 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb" event={"ID":"350a4c11-1d87-4f63-8ec8-c808de6e46b0","Type":"ContainerStarted","Data":"10905a19c4683d50ea888517c3106d8b424916c61a00c0dcd9d05db76ee77126"} Dec 05 18:07:37 crc kubenswrapper[4961]: I1205 18:07:37.624697 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb" event={"ID":"350a4c11-1d87-4f63-8ec8-c808de6e46b0","Type":"ContainerStarted","Data":"04e6703312a24301521723798a97d550d33c2312d2d53a9a13b9af1513b30391"} Dec 05 18:07:37 crc kubenswrapper[4961]: I1205 18:07:37.652059 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb" podStartSLOduration=2.231307283 podStartE2EDuration="2.652043338s" podCreationTimestamp="2025-12-05 18:07:35 +0000 UTC" firstStartedPulling="2025-12-05 18:07:36.687024306 +0000 UTC m=+2062.748174819" lastFinishedPulling="2025-12-05 18:07:37.107760401 +0000 UTC m=+2063.168910874" observedRunningTime="2025-12-05 18:07:37.647715112 +0000 UTC m=+2063.708865595" watchObservedRunningTime="2025-12-05 18:07:37.652043338 +0000 UTC m=+2063.713193811" Dec 05 18:07:57 crc kubenswrapper[4961]: I1205 18:07:57.246531 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 18:07:57 crc kubenswrapper[4961]: I1205 18:07:57.248521 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 18:07:57 crc kubenswrapper[4961]: I1205 18:07:57.248677 4961 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" Dec 05 18:07:57 crc kubenswrapper[4961]: I1205 18:07:57.249957 4961 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"a4dfbf02cada67058f2ed254a8aeb3929d2402207851cbed84f73238ae936112"} 
pod="openshift-machine-config-operator/machine-config-daemon-4vc27" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 18:07:57 crc kubenswrapper[4961]: I1205 18:07:57.250127 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" containerID="cri-o://a4dfbf02cada67058f2ed254a8aeb3929d2402207851cbed84f73238ae936112" gracePeriod=600 Dec 05 18:07:57 crc kubenswrapper[4961]: I1205 18:07:57.846462 4961 generic.go:334] "Generic (PLEG): container finished" podID="c048c267-061b-479b-9d63-b3aee093d9f6" containerID="a4dfbf02cada67058f2ed254a8aeb3929d2402207851cbed84f73238ae936112" exitCode=0 Dec 05 18:07:57 crc kubenswrapper[4961]: I1205 18:07:57.846533 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerDied","Data":"a4dfbf02cada67058f2ed254a8aeb3929d2402207851cbed84f73238ae936112"} Dec 05 18:07:57 crc kubenswrapper[4961]: I1205 18:07:57.847061 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerStarted","Data":"8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b"} Dec 05 18:07:57 crc kubenswrapper[4961]: I1205 18:07:57.847142 4961 scope.go:117] "RemoveContainer" containerID="d5a16dd83673fa9bed0960ec70652c325d6063efa960c60ce018243ea9dc62e9" Dec 05 18:09:57 crc kubenswrapper[4961]: I1205 18:09:57.245477 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 18:09:57 crc kubenswrapper[4961]: I1205 18:09:57.246103 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 18:09:59 crc kubenswrapper[4961]: I1205 18:09:59.784488 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-xqjmq"] Dec 05 18:09:59 crc kubenswrapper[4961]: I1205 18:09:59.789142 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-xqjmq" Dec 05 18:09:59 crc kubenswrapper[4961]: I1205 18:09:59.800186 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e89659e6-c9ef-4904-ba51-6bb473f98732-utilities\") pod \"certified-operators-xqjmq\" (UID: \"e89659e6-c9ef-4904-ba51-6bb473f98732\") " pod="openshift-marketplace/certified-operators-xqjmq" Dec 05 18:09:59 crc kubenswrapper[4961]: I1205 18:09:59.800260 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e89659e6-c9ef-4904-ba51-6bb473f98732-catalog-content\") pod \"certified-operators-xqjmq\" (UID: \"e89659e6-c9ef-4904-ba51-6bb473f98732\") " pod="openshift-marketplace/certified-operators-xqjmq" Dec 05 18:09:59 crc kubenswrapper[4961]: I1205 18:09:59.800481 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cm8tz\" (UniqueName: \"kubernetes.io/projected/e89659e6-c9ef-4904-ba51-6bb473f98732-kube-api-access-cm8tz\") pod \"certified-operators-xqjmq\" (UID: \"e89659e6-c9ef-4904-ba51-6bb473f98732\") " pod="openshift-marketplace/certified-operators-xqjmq" Dec 05 18:09:59 crc kubenswrapper[4961]: I1205 18:09:59.807411 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xqjmq"] Dec 05 18:09:59 crc kubenswrapper[4961]: I1205 18:09:59.902420 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e89659e6-c9ef-4904-ba51-6bb473f98732-utilities\") pod \"certified-operators-xqjmq\" (UID: \"e89659e6-c9ef-4904-ba51-6bb473f98732\") " pod="openshift-marketplace/certified-operators-xqjmq" Dec 05 18:09:59 crc kubenswrapper[4961]: I1205 18:09:59.902657 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e89659e6-c9ef-4904-ba51-6bb473f98732-catalog-content\") pod \"certified-operators-xqjmq\" (UID: \"e89659e6-c9ef-4904-ba51-6bb473f98732\") " pod="openshift-marketplace/certified-operators-xqjmq" Dec 05 18:09:59 crc kubenswrapper[4961]: I1205 18:09:59.902818 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cm8tz\" (UniqueName: \"kubernetes.io/projected/e89659e6-c9ef-4904-ba51-6bb473f98732-kube-api-access-cm8tz\") pod \"certified-operators-xqjmq\" (UID: \"e89659e6-c9ef-4904-ba51-6bb473f98732\") " pod="openshift-marketplace/certified-operators-xqjmq" Dec 05 18:09:59 crc kubenswrapper[4961]: I1205 18:09:59.902998 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e89659e6-c9ef-4904-ba51-6bb473f98732-utilities\") pod \"certified-operators-xqjmq\" (UID: \"e89659e6-c9ef-4904-ba51-6bb473f98732\") " pod="openshift-marketplace/certified-operators-xqjmq" Dec 05 18:09:59 crc kubenswrapper[4961]: I1205 18:09:59.903906 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e89659e6-c9ef-4904-ba51-6bb473f98732-catalog-content\") pod \"certified-operators-xqjmq\" (UID: \"e89659e6-c9ef-4904-ba51-6bb473f98732\") " pod="openshift-marketplace/certified-operators-xqjmq" Dec 05 18:09:59 crc kubenswrapper[4961]: I1205 18:09:59.926499 4961 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-cm8tz\" (UniqueName: \"kubernetes.io/projected/e89659e6-c9ef-4904-ba51-6bb473f98732-kube-api-access-cm8tz\") pod \"certified-operators-xqjmq\" (UID: \"e89659e6-c9ef-4904-ba51-6bb473f98732\") " pod="openshift-marketplace/certified-operators-xqjmq" Dec 05 18:10:00 crc kubenswrapper[4961]: I1205 18:10:00.113399 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xqjmq" Dec 05 18:10:00 crc kubenswrapper[4961]: I1205 18:10:00.615715 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-xqjmq"] Dec 05 18:10:01 crc kubenswrapper[4961]: I1205 18:10:01.280513 4961 generic.go:334] "Generic (PLEG): container finished" podID="e89659e6-c9ef-4904-ba51-6bb473f98732" containerID="ed2be5b6aa33a010fd62f390f15e60937609630e277beedc829b899737051276" exitCode=0 Dec 05 18:10:01 crc kubenswrapper[4961]: I1205 18:10:01.280603 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xqjmq" event={"ID":"e89659e6-c9ef-4904-ba51-6bb473f98732","Type":"ContainerDied","Data":"ed2be5b6aa33a010fd62f390f15e60937609630e277beedc829b899737051276"} Dec 05 18:10:01 crc kubenswrapper[4961]: I1205 18:10:01.281662 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xqjmq" event={"ID":"e89659e6-c9ef-4904-ba51-6bb473f98732","Type":"ContainerStarted","Data":"21316de76d0b5b70eee9795608f149fdc93b74ba2dac9082751128d1f2dc94c1"} Dec 05 18:10:01 crc kubenswrapper[4961]: I1205 18:10:01.282768 4961 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 18:10:02 crc kubenswrapper[4961]: I1205 18:10:02.290744 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xqjmq" event={"ID":"e89659e6-c9ef-4904-ba51-6bb473f98732","Type":"ContainerStarted","Data":"277096f953cce37f914fb0b7f8a5105e00b3c80c3fcc5455d170d081228972f2"} Dec 05 18:10:03 crc kubenswrapper[4961]: I1205 18:10:03.305622 4961 generic.go:334] "Generic (PLEG): container finished" podID="e89659e6-c9ef-4904-ba51-6bb473f98732" containerID="277096f953cce37f914fb0b7f8a5105e00b3c80c3fcc5455d170d081228972f2" exitCode=0 Dec 05 18:10:03 crc kubenswrapper[4961]: I1205 18:10:03.305965 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xqjmq" event={"ID":"e89659e6-c9ef-4904-ba51-6bb473f98732","Type":"ContainerDied","Data":"277096f953cce37f914fb0b7f8a5105e00b3c80c3fcc5455d170d081228972f2"} Dec 05 18:10:04 crc kubenswrapper[4961]: I1205 18:10:04.321105 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xqjmq" event={"ID":"e89659e6-c9ef-4904-ba51-6bb473f98732","Type":"ContainerStarted","Data":"9da2ad95c5bd1cc77fb60b26b3e6da6c721f47c9a7c05bbce5b9daaaea0a20de"} Dec 05 18:10:04 crc kubenswrapper[4961]: I1205 18:10:04.368383 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-xqjmq" podStartSLOduration=2.931832622 podStartE2EDuration="5.368362241s" podCreationTimestamp="2025-12-05 18:09:59 +0000 UTC" firstStartedPulling="2025-12-05 18:10:01.28250895 +0000 UTC m=+2207.343659433" lastFinishedPulling="2025-12-05 18:10:03.719038539 +0000 UTC m=+2209.780189052" observedRunningTime="2025-12-05 18:10:04.358340173 +0000 UTC m=+2210.419490656" watchObservedRunningTime="2025-12-05 
18:10:04.368362241 +0000 UTC m=+2210.429512724" Dec 05 18:10:10 crc kubenswrapper[4961]: I1205 18:10:10.114316 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-xqjmq" Dec 05 18:10:10 crc kubenswrapper[4961]: I1205 18:10:10.114925 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-xqjmq" Dec 05 18:10:10 crc kubenswrapper[4961]: I1205 18:10:10.163303 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-xqjmq" Dec 05 18:10:10 crc kubenswrapper[4961]: I1205 18:10:10.417626 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-xqjmq" Dec 05 18:10:10 crc kubenswrapper[4961]: I1205 18:10:10.472467 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xqjmq"] Dec 05 18:10:12 crc kubenswrapper[4961]: I1205 18:10:12.403675 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-xqjmq" podUID="e89659e6-c9ef-4904-ba51-6bb473f98732" containerName="registry-server" containerID="cri-o://9da2ad95c5bd1cc77fb60b26b3e6da6c721f47c9a7c05bbce5b9daaaea0a20de" gracePeriod=2 Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.389142 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xqjmq" Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.409013 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e89659e6-c9ef-4904-ba51-6bb473f98732-catalog-content\") pod \"e89659e6-c9ef-4904-ba51-6bb473f98732\" (UID: \"e89659e6-c9ef-4904-ba51-6bb473f98732\") " Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.409066 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e89659e6-c9ef-4904-ba51-6bb473f98732-utilities\") pod \"e89659e6-c9ef-4904-ba51-6bb473f98732\" (UID: \"e89659e6-c9ef-4904-ba51-6bb473f98732\") " Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.409128 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cm8tz\" (UniqueName: \"kubernetes.io/projected/e89659e6-c9ef-4904-ba51-6bb473f98732-kube-api-access-cm8tz\") pod \"e89659e6-c9ef-4904-ba51-6bb473f98732\" (UID: \"e89659e6-c9ef-4904-ba51-6bb473f98732\") " Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.411522 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e89659e6-c9ef-4904-ba51-6bb473f98732-utilities" (OuterVolumeSpecName: "utilities") pod "e89659e6-c9ef-4904-ba51-6bb473f98732" (UID: "e89659e6-c9ef-4904-ba51-6bb473f98732"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.415237 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e89659e6-c9ef-4904-ba51-6bb473f98732-kube-api-access-cm8tz" (OuterVolumeSpecName: "kube-api-access-cm8tz") pod "e89659e6-c9ef-4904-ba51-6bb473f98732" (UID: "e89659e6-c9ef-4904-ba51-6bb473f98732"). InnerVolumeSpecName "kube-api-access-cm8tz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.417260 4961 generic.go:334] "Generic (PLEG): container finished" podID="e89659e6-c9ef-4904-ba51-6bb473f98732" containerID="9da2ad95c5bd1cc77fb60b26b3e6da6c721f47c9a7c05bbce5b9daaaea0a20de" exitCode=0 Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.417300 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xqjmq" event={"ID":"e89659e6-c9ef-4904-ba51-6bb473f98732","Type":"ContainerDied","Data":"9da2ad95c5bd1cc77fb60b26b3e6da6c721f47c9a7c05bbce5b9daaaea0a20de"} Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.417324 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-xqjmq" event={"ID":"e89659e6-c9ef-4904-ba51-6bb473f98732","Type":"ContainerDied","Data":"21316de76d0b5b70eee9795608f149fdc93b74ba2dac9082751128d1f2dc94c1"} Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.417344 4961 scope.go:117] "RemoveContainer" containerID="9da2ad95c5bd1cc77fb60b26b3e6da6c721f47c9a7c05bbce5b9daaaea0a20de" Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.417356 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-xqjmq" Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.469563 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e89659e6-c9ef-4904-ba51-6bb473f98732-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e89659e6-c9ef-4904-ba51-6bb473f98732" (UID: "e89659e6-c9ef-4904-ba51-6bb473f98732"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.470581 4961 scope.go:117] "RemoveContainer" containerID="277096f953cce37f914fb0b7f8a5105e00b3c80c3fcc5455d170d081228972f2" Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.491051 4961 scope.go:117] "RemoveContainer" containerID="ed2be5b6aa33a010fd62f390f15e60937609630e277beedc829b899737051276" Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.511711 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e89659e6-c9ef-4904-ba51-6bb473f98732-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.511751 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e89659e6-c9ef-4904-ba51-6bb473f98732-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.511766 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cm8tz\" (UniqueName: \"kubernetes.io/projected/e89659e6-c9ef-4904-ba51-6bb473f98732-kube-api-access-cm8tz\") on node \"crc\" DevicePath \"\"" Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.537370 4961 scope.go:117] "RemoveContainer" containerID="9da2ad95c5bd1cc77fb60b26b3e6da6c721f47c9a7c05bbce5b9daaaea0a20de" Dec 05 18:10:13 crc kubenswrapper[4961]: E1205 18:10:13.537983 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9da2ad95c5bd1cc77fb60b26b3e6da6c721f47c9a7c05bbce5b9daaaea0a20de\": container with ID starting with 9da2ad95c5bd1cc77fb60b26b3e6da6c721f47c9a7c05bbce5b9daaaea0a20de not found: ID does not exist" 
containerID="9da2ad95c5bd1cc77fb60b26b3e6da6c721f47c9a7c05bbce5b9daaaea0a20de" Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.538112 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9da2ad95c5bd1cc77fb60b26b3e6da6c721f47c9a7c05bbce5b9daaaea0a20de"} err="failed to get container status \"9da2ad95c5bd1cc77fb60b26b3e6da6c721f47c9a7c05bbce5b9daaaea0a20de\": rpc error: code = NotFound desc = could not find container \"9da2ad95c5bd1cc77fb60b26b3e6da6c721f47c9a7c05bbce5b9daaaea0a20de\": container with ID starting with 9da2ad95c5bd1cc77fb60b26b3e6da6c721f47c9a7c05bbce5b9daaaea0a20de not found: ID does not exist" Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.538201 4961 scope.go:117] "RemoveContainer" containerID="277096f953cce37f914fb0b7f8a5105e00b3c80c3fcc5455d170d081228972f2" Dec 05 18:10:13 crc kubenswrapper[4961]: E1205 18:10:13.538607 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"277096f953cce37f914fb0b7f8a5105e00b3c80c3fcc5455d170d081228972f2\": container with ID starting with 277096f953cce37f914fb0b7f8a5105e00b3c80c3fcc5455d170d081228972f2 not found: ID does not exist" containerID="277096f953cce37f914fb0b7f8a5105e00b3c80c3fcc5455d170d081228972f2" Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.538649 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"277096f953cce37f914fb0b7f8a5105e00b3c80c3fcc5455d170d081228972f2"} err="failed to get container status \"277096f953cce37f914fb0b7f8a5105e00b3c80c3fcc5455d170d081228972f2\": rpc error: code = NotFound desc = could not find container \"277096f953cce37f914fb0b7f8a5105e00b3c80c3fcc5455d170d081228972f2\": container with ID starting with 277096f953cce37f914fb0b7f8a5105e00b3c80c3fcc5455d170d081228972f2 not found: ID does not exist" Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.538676 4961 scope.go:117] "RemoveContainer" containerID="ed2be5b6aa33a010fd62f390f15e60937609630e277beedc829b899737051276" Dec 05 18:10:13 crc kubenswrapper[4961]: E1205 18:10:13.538980 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed2be5b6aa33a010fd62f390f15e60937609630e277beedc829b899737051276\": container with ID starting with ed2be5b6aa33a010fd62f390f15e60937609630e277beedc829b899737051276 not found: ID does not exist" containerID="ed2be5b6aa33a010fd62f390f15e60937609630e277beedc829b899737051276" Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.539062 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed2be5b6aa33a010fd62f390f15e60937609630e277beedc829b899737051276"} err="failed to get container status \"ed2be5b6aa33a010fd62f390f15e60937609630e277beedc829b899737051276\": rpc error: code = NotFound desc = could not find container \"ed2be5b6aa33a010fd62f390f15e60937609630e277beedc829b899737051276\": container with ID starting with ed2be5b6aa33a010fd62f390f15e60937609630e277beedc829b899737051276 not found: ID does not exist" Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.759222 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-xqjmq"] Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.767066 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-xqjmq"] Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.818815 
4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-nvqmt"] Dec 05 18:10:13 crc kubenswrapper[4961]: E1205 18:10:13.819270 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e89659e6-c9ef-4904-ba51-6bb473f98732" containerName="extract-content" Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.819301 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="e89659e6-c9ef-4904-ba51-6bb473f98732" containerName="extract-content" Dec 05 18:10:13 crc kubenswrapper[4961]: E1205 18:10:13.819344 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e89659e6-c9ef-4904-ba51-6bb473f98732" containerName="extract-utilities" Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.819353 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="e89659e6-c9ef-4904-ba51-6bb473f98732" containerName="extract-utilities" Dec 05 18:10:13 crc kubenswrapper[4961]: E1205 18:10:13.819368 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e89659e6-c9ef-4904-ba51-6bb473f98732" containerName="registry-server" Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.819377 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="e89659e6-c9ef-4904-ba51-6bb473f98732" containerName="registry-server" Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.819641 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="e89659e6-c9ef-4904-ba51-6bb473f98732" containerName="registry-server" Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.821288 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nvqmt" Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.834317 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nvqmt"] Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.919590 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90f9ef10-2686-4315-ae98-45e93c8781e3-catalog-content\") pod \"redhat-operators-nvqmt\" (UID: \"90f9ef10-2686-4315-ae98-45e93c8781e3\") " pod="openshift-marketplace/redhat-operators-nvqmt" Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.919697 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wphp4\" (UniqueName: \"kubernetes.io/projected/90f9ef10-2686-4315-ae98-45e93c8781e3-kube-api-access-wphp4\") pod \"redhat-operators-nvqmt\" (UID: \"90f9ef10-2686-4315-ae98-45e93c8781e3\") " pod="openshift-marketplace/redhat-operators-nvqmt" Dec 05 18:10:13 crc kubenswrapper[4961]: I1205 18:10:13.919836 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90f9ef10-2686-4315-ae98-45e93c8781e3-utilities\") pod \"redhat-operators-nvqmt\" (UID: \"90f9ef10-2686-4315-ae98-45e93c8781e3\") " pod="openshift-marketplace/redhat-operators-nvqmt" Dec 05 18:10:14 crc kubenswrapper[4961]: I1205 18:10:14.021425 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90f9ef10-2686-4315-ae98-45e93c8781e3-utilities\") pod \"redhat-operators-nvqmt\" (UID: \"90f9ef10-2686-4315-ae98-45e93c8781e3\") " pod="openshift-marketplace/redhat-operators-nvqmt" Dec 05 18:10:14 crc kubenswrapper[4961]: I1205 18:10:14.021896 4961 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90f9ef10-2686-4315-ae98-45e93c8781e3-catalog-content\") pod \"redhat-operators-nvqmt\" (UID: \"90f9ef10-2686-4315-ae98-45e93c8781e3\") " pod="openshift-marketplace/redhat-operators-nvqmt" Dec 05 18:10:14 crc kubenswrapper[4961]: I1205 18:10:14.022037 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wphp4\" (UniqueName: \"kubernetes.io/projected/90f9ef10-2686-4315-ae98-45e93c8781e3-kube-api-access-wphp4\") pod \"redhat-operators-nvqmt\" (UID: \"90f9ef10-2686-4315-ae98-45e93c8781e3\") " pod="openshift-marketplace/redhat-operators-nvqmt" Dec 05 18:10:14 crc kubenswrapper[4961]: I1205 18:10:14.021930 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90f9ef10-2686-4315-ae98-45e93c8781e3-utilities\") pod \"redhat-operators-nvqmt\" (UID: \"90f9ef10-2686-4315-ae98-45e93c8781e3\") " pod="openshift-marketplace/redhat-operators-nvqmt" Dec 05 18:10:14 crc kubenswrapper[4961]: I1205 18:10:14.022271 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90f9ef10-2686-4315-ae98-45e93c8781e3-catalog-content\") pod \"redhat-operators-nvqmt\" (UID: \"90f9ef10-2686-4315-ae98-45e93c8781e3\") " pod="openshift-marketplace/redhat-operators-nvqmt" Dec 05 18:10:14 crc kubenswrapper[4961]: I1205 18:10:14.043633 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wphp4\" (UniqueName: \"kubernetes.io/projected/90f9ef10-2686-4315-ae98-45e93c8781e3-kube-api-access-wphp4\") pod \"redhat-operators-nvqmt\" (UID: \"90f9ef10-2686-4315-ae98-45e93c8781e3\") " pod="openshift-marketplace/redhat-operators-nvqmt" Dec 05 18:10:14 crc kubenswrapper[4961]: I1205 18:10:14.154063 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nvqmt" Dec 05 18:10:14 crc kubenswrapper[4961]: I1205 18:10:14.662403 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-nvqmt"] Dec 05 18:10:14 crc kubenswrapper[4961]: I1205 18:10:14.816376 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-7fm5l"] Dec 05 18:10:14 crc kubenswrapper[4961]: I1205 18:10:14.825511 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-7fm5l" Dec 05 18:10:14 crc kubenswrapper[4961]: I1205 18:10:14.834918 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7fm5l"] Dec 05 18:10:14 crc kubenswrapper[4961]: I1205 18:10:14.887265 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e89659e6-c9ef-4904-ba51-6bb473f98732" path="/var/lib/kubelet/pods/e89659e6-c9ef-4904-ba51-6bb473f98732/volumes" Dec 05 18:10:14 crc kubenswrapper[4961]: I1205 18:10:14.943120 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ffb47854-8abd-40f4-b157-ce2bd0953361-utilities\") pod \"community-operators-7fm5l\" (UID: \"ffb47854-8abd-40f4-b157-ce2bd0953361\") " pod="openshift-marketplace/community-operators-7fm5l" Dec 05 18:10:14 crc kubenswrapper[4961]: I1205 18:10:14.943181 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ffb47854-8abd-40f4-b157-ce2bd0953361-catalog-content\") pod \"community-operators-7fm5l\" (UID: \"ffb47854-8abd-40f4-b157-ce2bd0953361\") " pod="openshift-marketplace/community-operators-7fm5l" Dec 05 18:10:14 crc kubenswrapper[4961]: I1205 18:10:14.943213 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kflfv\" (UniqueName: \"kubernetes.io/projected/ffb47854-8abd-40f4-b157-ce2bd0953361-kube-api-access-kflfv\") pod \"community-operators-7fm5l\" (UID: \"ffb47854-8abd-40f4-b157-ce2bd0953361\") " pod="openshift-marketplace/community-operators-7fm5l" Dec 05 18:10:15 crc kubenswrapper[4961]: I1205 18:10:15.045007 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ffb47854-8abd-40f4-b157-ce2bd0953361-utilities\") pod \"community-operators-7fm5l\" (UID: \"ffb47854-8abd-40f4-b157-ce2bd0953361\") " pod="openshift-marketplace/community-operators-7fm5l" Dec 05 18:10:15 crc kubenswrapper[4961]: I1205 18:10:15.045060 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ffb47854-8abd-40f4-b157-ce2bd0953361-catalog-content\") pod \"community-operators-7fm5l\" (UID: \"ffb47854-8abd-40f4-b157-ce2bd0953361\") " pod="openshift-marketplace/community-operators-7fm5l" Dec 05 18:10:15 crc kubenswrapper[4961]: I1205 18:10:15.045428 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kflfv\" (UniqueName: \"kubernetes.io/projected/ffb47854-8abd-40f4-b157-ce2bd0953361-kube-api-access-kflfv\") pod \"community-operators-7fm5l\" (UID: \"ffb47854-8abd-40f4-b157-ce2bd0953361\") " pod="openshift-marketplace/community-operators-7fm5l" Dec 05 18:10:15 crc kubenswrapper[4961]: I1205 18:10:15.045641 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ffb47854-8abd-40f4-b157-ce2bd0953361-utilities\") pod \"community-operators-7fm5l\" (UID: \"ffb47854-8abd-40f4-b157-ce2bd0953361\") " pod="openshift-marketplace/community-operators-7fm5l" Dec 05 18:10:15 crc kubenswrapper[4961]: I1205 18:10:15.045729 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/ffb47854-8abd-40f4-b157-ce2bd0953361-catalog-content\") pod \"community-operators-7fm5l\" (UID: \"ffb47854-8abd-40f4-b157-ce2bd0953361\") " pod="openshift-marketplace/community-operators-7fm5l" Dec 05 18:10:15 crc kubenswrapper[4961]: I1205 18:10:15.069736 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kflfv\" (UniqueName: \"kubernetes.io/projected/ffb47854-8abd-40f4-b157-ce2bd0953361-kube-api-access-kflfv\") pod \"community-operators-7fm5l\" (UID: \"ffb47854-8abd-40f4-b157-ce2bd0953361\") " pod="openshift-marketplace/community-operators-7fm5l" Dec 05 18:10:15 crc kubenswrapper[4961]: I1205 18:10:15.169091 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7fm5l" Dec 05 18:10:15 crc kubenswrapper[4961]: I1205 18:10:15.476320 4961 generic.go:334] "Generic (PLEG): container finished" podID="90f9ef10-2686-4315-ae98-45e93c8781e3" containerID="9138f10bce30de2c003758ca618c0e2d4da5127e1b0c2dac1c3243310704315a" exitCode=0 Dec 05 18:10:15 crc kubenswrapper[4961]: I1205 18:10:15.476367 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nvqmt" event={"ID":"90f9ef10-2686-4315-ae98-45e93c8781e3","Type":"ContainerDied","Data":"9138f10bce30de2c003758ca618c0e2d4da5127e1b0c2dac1c3243310704315a"} Dec 05 18:10:15 crc kubenswrapper[4961]: I1205 18:10:15.476393 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nvqmt" event={"ID":"90f9ef10-2686-4315-ae98-45e93c8781e3","Type":"ContainerStarted","Data":"7c7521b53a443cdeb81d3dce5f8e9f8b89eddb335052241e346dd5c084fb6f20"} Dec 05 18:10:15 crc kubenswrapper[4961]: I1205 18:10:15.527068 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-7fm5l"] Dec 05 18:10:16 crc kubenswrapper[4961]: I1205 18:10:16.222652 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7rh7t"] Dec 05 18:10:16 crc kubenswrapper[4961]: I1205 18:10:16.228481 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7rh7t" Dec 05 18:10:16 crc kubenswrapper[4961]: I1205 18:10:16.235664 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7rh7t"] Dec 05 18:10:16 crc kubenswrapper[4961]: I1205 18:10:16.378284 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6ht6\" (UniqueName: \"kubernetes.io/projected/a6001a66-8816-4976-b49e-d28ec0185d4b-kube-api-access-g6ht6\") pod \"redhat-marketplace-7rh7t\" (UID: \"a6001a66-8816-4976-b49e-d28ec0185d4b\") " pod="openshift-marketplace/redhat-marketplace-7rh7t" Dec 05 18:10:16 crc kubenswrapper[4961]: I1205 18:10:16.378414 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6001a66-8816-4976-b49e-d28ec0185d4b-utilities\") pod \"redhat-marketplace-7rh7t\" (UID: \"a6001a66-8816-4976-b49e-d28ec0185d4b\") " pod="openshift-marketplace/redhat-marketplace-7rh7t" Dec 05 18:10:16 crc kubenswrapper[4961]: I1205 18:10:16.378470 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6001a66-8816-4976-b49e-d28ec0185d4b-catalog-content\") pod \"redhat-marketplace-7rh7t\" (UID: \"a6001a66-8816-4976-b49e-d28ec0185d4b\") " pod="openshift-marketplace/redhat-marketplace-7rh7t" Dec 05 18:10:16 crc kubenswrapper[4961]: I1205 18:10:16.480322 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6001a66-8816-4976-b49e-d28ec0185d4b-utilities\") pod \"redhat-marketplace-7rh7t\" (UID: \"a6001a66-8816-4976-b49e-d28ec0185d4b\") " pod="openshift-marketplace/redhat-marketplace-7rh7t" Dec 05 18:10:16 crc kubenswrapper[4961]: I1205 18:10:16.480390 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6001a66-8816-4976-b49e-d28ec0185d4b-catalog-content\") pod \"redhat-marketplace-7rh7t\" (UID: \"a6001a66-8816-4976-b49e-d28ec0185d4b\") " pod="openshift-marketplace/redhat-marketplace-7rh7t" Dec 05 18:10:16 crc kubenswrapper[4961]: I1205 18:10:16.480490 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6ht6\" (UniqueName: \"kubernetes.io/projected/a6001a66-8816-4976-b49e-d28ec0185d4b-kube-api-access-g6ht6\") pod \"redhat-marketplace-7rh7t\" (UID: \"a6001a66-8816-4976-b49e-d28ec0185d4b\") " pod="openshift-marketplace/redhat-marketplace-7rh7t" Dec 05 18:10:16 crc kubenswrapper[4961]: I1205 18:10:16.481025 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6001a66-8816-4976-b49e-d28ec0185d4b-utilities\") pod \"redhat-marketplace-7rh7t\" (UID: \"a6001a66-8816-4976-b49e-d28ec0185d4b\") " pod="openshift-marketplace/redhat-marketplace-7rh7t" Dec 05 18:10:16 crc kubenswrapper[4961]: I1205 18:10:16.481085 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6001a66-8816-4976-b49e-d28ec0185d4b-catalog-content\") pod \"redhat-marketplace-7rh7t\" (UID: \"a6001a66-8816-4976-b49e-d28ec0185d4b\") " pod="openshift-marketplace/redhat-marketplace-7rh7t" Dec 05 18:10:16 crc kubenswrapper[4961]: I1205 18:10:16.485746 4961 generic.go:334] "Generic (PLEG): container 
finished" podID="ffb47854-8abd-40f4-b157-ce2bd0953361" containerID="5f133ca49c92fa6aff399df53dffce3bb000478277dffd20f692e23d486a986b" exitCode=0 Dec 05 18:10:16 crc kubenswrapper[4961]: I1205 18:10:16.485846 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7fm5l" event={"ID":"ffb47854-8abd-40f4-b157-ce2bd0953361","Type":"ContainerDied","Data":"5f133ca49c92fa6aff399df53dffce3bb000478277dffd20f692e23d486a986b"} Dec 05 18:10:16 crc kubenswrapper[4961]: I1205 18:10:16.495572 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7fm5l" event={"ID":"ffb47854-8abd-40f4-b157-ce2bd0953361","Type":"ContainerStarted","Data":"04ec0bea8acb68b00c6bc602190706a54f949963980704b2fe71c42520e1fa5b"} Dec 05 18:10:16 crc kubenswrapper[4961]: I1205 18:10:16.495614 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nvqmt" event={"ID":"90f9ef10-2686-4315-ae98-45e93c8781e3","Type":"ContainerStarted","Data":"6978c27c3d89b83d8977e5ef76d784fada3d5d82b1dca4e2d4f3cefafd7991cb"} Dec 05 18:10:16 crc kubenswrapper[4961]: I1205 18:10:16.501364 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6ht6\" (UniqueName: \"kubernetes.io/projected/a6001a66-8816-4976-b49e-d28ec0185d4b-kube-api-access-g6ht6\") pod \"redhat-marketplace-7rh7t\" (UID: \"a6001a66-8816-4976-b49e-d28ec0185d4b\") " pod="openshift-marketplace/redhat-marketplace-7rh7t" Dec 05 18:10:16 crc kubenswrapper[4961]: I1205 18:10:16.561257 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7rh7t" Dec 05 18:10:17 crc kubenswrapper[4961]: I1205 18:10:17.012119 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7rh7t"] Dec 05 18:10:17 crc kubenswrapper[4961]: I1205 18:10:17.502584 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7rh7t" event={"ID":"a6001a66-8816-4976-b49e-d28ec0185d4b","Type":"ContainerStarted","Data":"1d6e054ea34e4e99848a3d3d6e887da704a20613cb9f677db8587b58f949f2a5"} Dec 05 18:10:17 crc kubenswrapper[4961]: I1205 18:10:17.502912 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7rh7t" event={"ID":"a6001a66-8816-4976-b49e-d28ec0185d4b","Type":"ContainerStarted","Data":"f5332719ef0118d84fb65cb7c71e77a9245b867a4061eea6f2d46d382b707ec7"} Dec 05 18:10:18 crc kubenswrapper[4961]: I1205 18:10:18.514252 4961 generic.go:334] "Generic (PLEG): container finished" podID="90f9ef10-2686-4315-ae98-45e93c8781e3" containerID="6978c27c3d89b83d8977e5ef76d784fada3d5d82b1dca4e2d4f3cefafd7991cb" exitCode=0 Dec 05 18:10:18 crc kubenswrapper[4961]: I1205 18:10:18.514302 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nvqmt" event={"ID":"90f9ef10-2686-4315-ae98-45e93c8781e3","Type":"ContainerDied","Data":"6978c27c3d89b83d8977e5ef76d784fada3d5d82b1dca4e2d4f3cefafd7991cb"} Dec 05 18:10:18 crc kubenswrapper[4961]: I1205 18:10:18.517230 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7fm5l" event={"ID":"ffb47854-8abd-40f4-b157-ce2bd0953361","Type":"ContainerStarted","Data":"44060a69c028a57cb65e4819631450175490d623af12975dc8c8ddc84764d89c"} Dec 05 18:10:19 crc kubenswrapper[4961]: I1205 18:10:19.527380 4961 generic.go:334] "Generic (PLEG): container finished" 
podID="a6001a66-8816-4976-b49e-d28ec0185d4b" containerID="1d6e054ea34e4e99848a3d3d6e887da704a20613cb9f677db8587b58f949f2a5" exitCode=0 Dec 05 18:10:19 crc kubenswrapper[4961]: I1205 18:10:19.527573 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7rh7t" event={"ID":"a6001a66-8816-4976-b49e-d28ec0185d4b","Type":"ContainerDied","Data":"1d6e054ea34e4e99848a3d3d6e887da704a20613cb9f677db8587b58f949f2a5"} Dec 05 18:10:20 crc kubenswrapper[4961]: I1205 18:10:20.539942 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nvqmt" event={"ID":"90f9ef10-2686-4315-ae98-45e93c8781e3","Type":"ContainerStarted","Data":"2ef031b541db62ae9b820f5634035f3672f98e887c124f23522cdb66b538cb9c"} Dec 05 18:10:20 crc kubenswrapper[4961]: I1205 18:10:20.543303 4961 generic.go:334] "Generic (PLEG): container finished" podID="ffb47854-8abd-40f4-b157-ce2bd0953361" containerID="44060a69c028a57cb65e4819631450175490d623af12975dc8c8ddc84764d89c" exitCode=0 Dec 05 18:10:20 crc kubenswrapper[4961]: I1205 18:10:20.543341 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7fm5l" event={"ID":"ffb47854-8abd-40f4-b157-ce2bd0953361","Type":"ContainerDied","Data":"44060a69c028a57cb65e4819631450175490d623af12975dc8c8ddc84764d89c"} Dec 05 18:10:20 crc kubenswrapper[4961]: I1205 18:10:20.569132 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-nvqmt" podStartSLOduration=3.012417598 podStartE2EDuration="7.569106202s" podCreationTimestamp="2025-12-05 18:10:13 +0000 UTC" firstStartedPulling="2025-12-05 18:10:15.49192482 +0000 UTC m=+2221.553075293" lastFinishedPulling="2025-12-05 18:10:20.048613424 +0000 UTC m=+2226.109763897" observedRunningTime="2025-12-05 18:10:20.561235909 +0000 UTC m=+2226.622386402" watchObservedRunningTime="2025-12-05 18:10:20.569106202 +0000 UTC m=+2226.630256675" Dec 05 18:10:21 crc kubenswrapper[4961]: I1205 18:10:21.556152 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7fm5l" event={"ID":"ffb47854-8abd-40f4-b157-ce2bd0953361","Type":"ContainerStarted","Data":"2f96b156e5a0ab7395e406f6f62619d1e5008813327877ccaa5a95e78eea4eb0"} Dec 05 18:10:21 crc kubenswrapper[4961]: I1205 18:10:21.557811 4961 generic.go:334] "Generic (PLEG): container finished" podID="a6001a66-8816-4976-b49e-d28ec0185d4b" containerID="bcc748971e14c66d9568b4a5b2dbc45f7a1bd077eeb5869ea0270a343c300fe1" exitCode=0 Dec 05 18:10:21 crc kubenswrapper[4961]: I1205 18:10:21.557860 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7rh7t" event={"ID":"a6001a66-8816-4976-b49e-d28ec0185d4b","Type":"ContainerDied","Data":"bcc748971e14c66d9568b4a5b2dbc45f7a1bd077eeb5869ea0270a343c300fe1"} Dec 05 18:10:21 crc kubenswrapper[4961]: I1205 18:10:21.584943 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-7fm5l" podStartSLOduration=3.081284743 podStartE2EDuration="7.584927421s" podCreationTimestamp="2025-12-05 18:10:14 +0000 UTC" firstStartedPulling="2025-12-05 18:10:16.487515189 +0000 UTC m=+2222.548665662" lastFinishedPulling="2025-12-05 18:10:20.991157867 +0000 UTC m=+2227.052308340" observedRunningTime="2025-12-05 18:10:21.583698071 +0000 UTC m=+2227.644848544" watchObservedRunningTime="2025-12-05 18:10:21.584927421 +0000 UTC m=+2227.646077884" Dec 05 18:10:22 crc 
kubenswrapper[4961]: I1205 18:10:22.573455 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7rh7t" event={"ID":"a6001a66-8816-4976-b49e-d28ec0185d4b","Type":"ContainerStarted","Data":"28d5389266895a570ccf0981a00915c980065ca798575a3005df3d7328e18fe6"} Dec 05 18:10:22 crc kubenswrapper[4961]: I1205 18:10:22.599947 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7rh7t" podStartSLOduration=4.102762458 podStartE2EDuration="6.599924639s" podCreationTimestamp="2025-12-05 18:10:16 +0000 UTC" firstStartedPulling="2025-12-05 18:10:19.52956174 +0000 UTC m=+2225.590712213" lastFinishedPulling="2025-12-05 18:10:22.026723921 +0000 UTC m=+2228.087874394" observedRunningTime="2025-12-05 18:10:22.592435764 +0000 UTC m=+2228.653586237" watchObservedRunningTime="2025-12-05 18:10:22.599924639 +0000 UTC m=+2228.661075112" Dec 05 18:10:24 crc kubenswrapper[4961]: I1205 18:10:24.154904 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-nvqmt" Dec 05 18:10:24 crc kubenswrapper[4961]: I1205 18:10:24.155264 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-nvqmt" Dec 05 18:10:25 crc kubenswrapper[4961]: I1205 18:10:25.170279 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-7fm5l" Dec 05 18:10:25 crc kubenswrapper[4961]: I1205 18:10:25.170534 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-7fm5l" Dec 05 18:10:25 crc kubenswrapper[4961]: I1205 18:10:25.205334 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-nvqmt" podUID="90f9ef10-2686-4315-ae98-45e93c8781e3" containerName="registry-server" probeResult="failure" output=< Dec 05 18:10:25 crc kubenswrapper[4961]: timeout: failed to connect service ":50051" within 1s Dec 05 18:10:25 crc kubenswrapper[4961]: > Dec 05 18:10:25 crc kubenswrapper[4961]: I1205 18:10:25.224948 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-7fm5l" Dec 05 18:10:26 crc kubenswrapper[4961]: I1205 18:10:26.561501 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7rh7t" Dec 05 18:10:26 crc kubenswrapper[4961]: I1205 18:10:26.561755 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7rh7t" Dec 05 18:10:26 crc kubenswrapper[4961]: I1205 18:10:26.610467 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7rh7t" Dec 05 18:10:27 crc kubenswrapper[4961]: I1205 18:10:27.245163 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 18:10:27 crc kubenswrapper[4961]: I1205 18:10:27.245216 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": 
dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 18:10:34 crc kubenswrapper[4961]: I1205 18:10:34.226270 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-nvqmt"
Dec 05 18:10:34 crc kubenswrapper[4961]: I1205 18:10:34.291179 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-nvqmt"
Dec 05 18:10:34 crc kubenswrapper[4961]: I1205 18:10:34.471983 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nvqmt"]
Dec 05 18:10:35 crc kubenswrapper[4961]: I1205 18:10:35.231202 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-7fm5l"
Dec 05 18:10:35 crc kubenswrapper[4961]: I1205 18:10:35.696247 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-nvqmt" podUID="90f9ef10-2686-4315-ae98-45e93c8781e3" containerName="registry-server" containerID="cri-o://2ef031b541db62ae9b820f5634035f3672f98e887c124f23522cdb66b538cb9c" gracePeriod=2
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.174162 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nvqmt"
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.303328 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90f9ef10-2686-4315-ae98-45e93c8781e3-catalog-content\") pod \"90f9ef10-2686-4315-ae98-45e93c8781e3\" (UID: \"90f9ef10-2686-4315-ae98-45e93c8781e3\") "
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.304371 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90f9ef10-2686-4315-ae98-45e93c8781e3-utilities\") pod \"90f9ef10-2686-4315-ae98-45e93c8781e3\" (UID: \"90f9ef10-2686-4315-ae98-45e93c8781e3\") "
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.304488 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wphp4\" (UniqueName: \"kubernetes.io/projected/90f9ef10-2686-4315-ae98-45e93c8781e3-kube-api-access-wphp4\") pod \"90f9ef10-2686-4315-ae98-45e93c8781e3\" (UID: \"90f9ef10-2686-4315-ae98-45e93c8781e3\") "
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.305074 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90f9ef10-2686-4315-ae98-45e93c8781e3-utilities" (OuterVolumeSpecName: "utilities") pod "90f9ef10-2686-4315-ae98-45e93c8781e3" (UID: "90f9ef10-2686-4315-ae98-45e93c8781e3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.321049 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/90f9ef10-2686-4315-ae98-45e93c8781e3-kube-api-access-wphp4" (OuterVolumeSpecName: "kube-api-access-wphp4") pod "90f9ef10-2686-4315-ae98-45e93c8781e3" (UID: "90f9ef10-2686-4315-ae98-45e93c8781e3"). InnerVolumeSpecName "kube-api-access-wphp4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.405634 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90f9ef10-2686-4315-ae98-45e93c8781e3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "90f9ef10-2686-4315-ae98-45e93c8781e3" (UID: "90f9ef10-2686-4315-ae98-45e93c8781e3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.406045 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90f9ef10-2686-4315-ae98-45e93c8781e3-catalog-content\") pod \"90f9ef10-2686-4315-ae98-45e93c8781e3\" (UID: \"90f9ef10-2686-4315-ae98-45e93c8781e3\") "
Dec 05 18:10:36 crc kubenswrapper[4961]: W1205 18:10:36.406419 4961 empty_dir.go:500] Warning: Unmount skipped because path does not exist: /var/lib/kubelet/pods/90f9ef10-2686-4315-ae98-45e93c8781e3/volumes/kubernetes.io~empty-dir/catalog-content
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.406438 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/90f9ef10-2686-4315-ae98-45e93c8781e3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "90f9ef10-2686-4315-ae98-45e93c8781e3" (UID: "90f9ef10-2686-4315-ae98-45e93c8781e3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.406750 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/90f9ef10-2686-4315-ae98-45e93c8781e3-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.406816 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wphp4\" (UniqueName: \"kubernetes.io/projected/90f9ef10-2686-4315-ae98-45e93c8781e3-kube-api-access-wphp4\") on node \"crc\" DevicePath \"\""
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.406831 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/90f9ef10-2686-4315-ae98-45e93c8781e3-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.617146 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7rh7t"
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.707800 4961 generic.go:334] "Generic (PLEG): container finished" podID="90f9ef10-2686-4315-ae98-45e93c8781e3" containerID="2ef031b541db62ae9b820f5634035f3672f98e887c124f23522cdb66b538cb9c" exitCode=0
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.707858 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nvqmt" event={"ID":"90f9ef10-2686-4315-ae98-45e93c8781e3","Type":"ContainerDied","Data":"2ef031b541db62ae9b820f5634035f3672f98e887c124f23522cdb66b538cb9c"}
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.707896 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-nvqmt" event={"ID":"90f9ef10-2686-4315-ae98-45e93c8781e3","Type":"ContainerDied","Data":"7c7521b53a443cdeb81d3dce5f8e9f8b89eddb335052241e346dd5c084fb6f20"}
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.707929 4961 scope.go:117] "RemoveContainer" containerID="2ef031b541db62ae9b820f5634035f3672f98e887c124f23522cdb66b538cb9c"
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.708141 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-nvqmt"
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.737121 4961 scope.go:117] "RemoveContainer" containerID="6978c27c3d89b83d8977e5ef76d784fada3d5d82b1dca4e2d4f3cefafd7991cb"
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.763866 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-nvqmt"]
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.774892 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-nvqmt"]
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.782277 4961 scope.go:117] "RemoveContainer" containerID="9138f10bce30de2c003758ca618c0e2d4da5127e1b0c2dac1c3243310704315a"
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.823516 4961 scope.go:117] "RemoveContainer" containerID="2ef031b541db62ae9b820f5634035f3672f98e887c124f23522cdb66b538cb9c"
Dec 05 18:10:36 crc kubenswrapper[4961]: E1205 18:10:36.824229 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2ef031b541db62ae9b820f5634035f3672f98e887c124f23522cdb66b538cb9c\": container with ID starting with 2ef031b541db62ae9b820f5634035f3672f98e887c124f23522cdb66b538cb9c not found: ID does not exist" containerID="2ef031b541db62ae9b820f5634035f3672f98e887c124f23522cdb66b538cb9c"
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.824284 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2ef031b541db62ae9b820f5634035f3672f98e887c124f23522cdb66b538cb9c"} err="failed to get container status \"2ef031b541db62ae9b820f5634035f3672f98e887c124f23522cdb66b538cb9c\": rpc error: code = NotFound desc = could not find container \"2ef031b541db62ae9b820f5634035f3672f98e887c124f23522cdb66b538cb9c\": container with ID starting with 2ef031b541db62ae9b820f5634035f3672f98e887c124f23522cdb66b538cb9c not found: ID does not exist"
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.824316 4961 scope.go:117] "RemoveContainer" containerID="6978c27c3d89b83d8977e5ef76d784fada3d5d82b1dca4e2d4f3cefafd7991cb"
Dec 05 18:10:36 crc kubenswrapper[4961]: E1205 18:10:36.824791 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6978c27c3d89b83d8977e5ef76d784fada3d5d82b1dca4e2d4f3cefafd7991cb\": container with ID starting with 6978c27c3d89b83d8977e5ef76d784fada3d5d82b1dca4e2d4f3cefafd7991cb not found: ID does not exist" containerID="6978c27c3d89b83d8977e5ef76d784fada3d5d82b1dca4e2d4f3cefafd7991cb"
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.824824 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6978c27c3d89b83d8977e5ef76d784fada3d5d82b1dca4e2d4f3cefafd7991cb"} err="failed to get container status \"6978c27c3d89b83d8977e5ef76d784fada3d5d82b1dca4e2d4f3cefafd7991cb\": rpc error: code = NotFound desc = could not find container \"6978c27c3d89b83d8977e5ef76d784fada3d5d82b1dca4e2d4f3cefafd7991cb\": container with ID starting with 6978c27c3d89b83d8977e5ef76d784fada3d5d82b1dca4e2d4f3cefafd7991cb not found: ID does not exist"
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.824843 4961 scope.go:117] "RemoveContainer" containerID="9138f10bce30de2c003758ca618c0e2d4da5127e1b0c2dac1c3243310704315a"
Dec 05 18:10:36 crc kubenswrapper[4961]: E1205 18:10:36.825120 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9138f10bce30de2c003758ca618c0e2d4da5127e1b0c2dac1c3243310704315a\": container with ID starting with 9138f10bce30de2c003758ca618c0e2d4da5127e1b0c2dac1c3243310704315a not found: ID does not exist" containerID="9138f10bce30de2c003758ca618c0e2d4da5127e1b0c2dac1c3243310704315a"
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.825153 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9138f10bce30de2c003758ca618c0e2d4da5127e1b0c2dac1c3243310704315a"} err="failed to get container status \"9138f10bce30de2c003758ca618c0e2d4da5127e1b0c2dac1c3243310704315a\": rpc error: code = NotFound desc = could not find container \"9138f10bce30de2c003758ca618c0e2d4da5127e1b0c2dac1c3243310704315a\": container with ID starting with 9138f10bce30de2c003758ca618c0e2d4da5127e1b0c2dac1c3243310704315a not found: ID does not exist"
Dec 05 18:10:36 crc kubenswrapper[4961]: I1205 18:10:36.875454 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="90f9ef10-2686-4315-ae98-45e93c8781e3" path="/var/lib/kubelet/pods/90f9ef10-2686-4315-ae98-45e93c8781e3/volumes"
Dec 05 18:10:37 crc kubenswrapper[4961]: I1205 18:10:37.467108 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7fm5l"]
Dec 05 18:10:37 crc kubenswrapper[4961]: I1205 18:10:37.467358 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-7fm5l" podUID="ffb47854-8abd-40f4-b157-ce2bd0953361" containerName="registry-server" containerID="cri-o://2f96b156e5a0ab7395e406f6f62619d1e5008813327877ccaa5a95e78eea4eb0" gracePeriod=2
Dec 05 18:10:37 crc kubenswrapper[4961]: I1205 18:10:37.722282 4961 generic.go:334] "Generic (PLEG): container finished" podID="ffb47854-8abd-40f4-b157-ce2bd0953361" containerID="2f96b156e5a0ab7395e406f6f62619d1e5008813327877ccaa5a95e78eea4eb0" exitCode=0
Dec 05 18:10:37 crc kubenswrapper[4961]: I1205 18:10:37.722319 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7fm5l" event={"ID":"ffb47854-8abd-40f4-b157-ce2bd0953361","Type":"ContainerDied","Data":"2f96b156e5a0ab7395e406f6f62619d1e5008813327877ccaa5a95e78eea4eb0"}
Dec 05 18:10:37 crc kubenswrapper[4961]: I1205 18:10:37.915066 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7fm5l"
Dec 05 18:10:38 crc kubenswrapper[4961]: I1205 18:10:38.042332 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kflfv\" (UniqueName: \"kubernetes.io/projected/ffb47854-8abd-40f4-b157-ce2bd0953361-kube-api-access-kflfv\") pod \"ffb47854-8abd-40f4-b157-ce2bd0953361\" (UID: \"ffb47854-8abd-40f4-b157-ce2bd0953361\") "
Dec 05 18:10:38 crc kubenswrapper[4961]: I1205 18:10:38.042510 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ffb47854-8abd-40f4-b157-ce2bd0953361-utilities\") pod \"ffb47854-8abd-40f4-b157-ce2bd0953361\" (UID: \"ffb47854-8abd-40f4-b157-ce2bd0953361\") "
Dec 05 18:10:38 crc kubenswrapper[4961]: I1205 18:10:38.042600 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ffb47854-8abd-40f4-b157-ce2bd0953361-catalog-content\") pod \"ffb47854-8abd-40f4-b157-ce2bd0953361\" (UID: \"ffb47854-8abd-40f4-b157-ce2bd0953361\") "
Dec 05 18:10:38 crc kubenswrapper[4961]: I1205 18:10:38.044766 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ffb47854-8abd-40f4-b157-ce2bd0953361-utilities" (OuterVolumeSpecName: "utilities") pod "ffb47854-8abd-40f4-b157-ce2bd0953361" (UID: "ffb47854-8abd-40f4-b157-ce2bd0953361"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 18:10:38 crc kubenswrapper[4961]: I1205 18:10:38.049473 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffb47854-8abd-40f4-b157-ce2bd0953361-kube-api-access-kflfv" (OuterVolumeSpecName: "kube-api-access-kflfv") pod "ffb47854-8abd-40f4-b157-ce2bd0953361" (UID: "ffb47854-8abd-40f4-b157-ce2bd0953361"). InnerVolumeSpecName "kube-api-access-kflfv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 18:10:38 crc kubenswrapper[4961]: I1205 18:10:38.092505 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ffb47854-8abd-40f4-b157-ce2bd0953361-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ffb47854-8abd-40f4-b157-ce2bd0953361" (UID: "ffb47854-8abd-40f4-b157-ce2bd0953361"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 18:10:38 crc kubenswrapper[4961]: I1205 18:10:38.144534 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ffb47854-8abd-40f4-b157-ce2bd0953361-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 18:10:38 crc kubenswrapper[4961]: I1205 18:10:38.145393 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kflfv\" (UniqueName: \"kubernetes.io/projected/ffb47854-8abd-40f4-b157-ce2bd0953361-kube-api-access-kflfv\") on node \"crc\" DevicePath \"\""
Dec 05 18:10:38 crc kubenswrapper[4961]: I1205 18:10:38.145529 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ffb47854-8abd-40f4-b157-ce2bd0953361-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 18:10:38 crc kubenswrapper[4961]: I1205 18:10:38.737500 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-7fm5l" event={"ID":"ffb47854-8abd-40f4-b157-ce2bd0953361","Type":"ContainerDied","Data":"04ec0bea8acb68b00c6bc602190706a54f949963980704b2fe71c42520e1fa5b"}
Dec 05 18:10:38 crc kubenswrapper[4961]: I1205 18:10:38.737570 4961 scope.go:117] "RemoveContainer" containerID="2f96b156e5a0ab7395e406f6f62619d1e5008813327877ccaa5a95e78eea4eb0"
Dec 05 18:10:38 crc kubenswrapper[4961]: I1205 18:10:38.737599 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-7fm5l"
Dec 05 18:10:38 crc kubenswrapper[4961]: I1205 18:10:38.791341 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-7fm5l"]
Dec 05 18:10:38 crc kubenswrapper[4961]: I1205 18:10:38.808520 4961 scope.go:117] "RemoveContainer" containerID="44060a69c028a57cb65e4819631450175490d623af12975dc8c8ddc84764d89c"
Dec 05 18:10:38 crc kubenswrapper[4961]: I1205 18:10:38.828574 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-7fm5l"]
Dec 05 18:10:38 crc kubenswrapper[4961]: I1205 18:10:38.840608 4961 scope.go:117] "RemoveContainer" containerID="5f133ca49c92fa6aff399df53dffce3bb000478277dffd20f692e23d486a986b"
Dec 05 18:10:38 crc kubenswrapper[4961]: I1205 18:10:38.880534 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffb47854-8abd-40f4-b157-ce2bd0953361" path="/var/lib/kubelet/pods/ffb47854-8abd-40f4-b157-ce2bd0953361/volumes"
Dec 05 18:10:39 crc kubenswrapper[4961]: I1205 18:10:39.867957 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7rh7t"]
Dec 05 18:10:39 crc kubenswrapper[4961]: I1205 18:10:39.869032 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7rh7t" podUID="a6001a66-8816-4976-b49e-d28ec0185d4b" containerName="registry-server" containerID="cri-o://28d5389266895a570ccf0981a00915c980065ca798575a3005df3d7328e18fe6" gracePeriod=2
Dec 05 18:10:40 crc kubenswrapper[4961]: I1205 18:10:40.757619 4961 generic.go:334] "Generic (PLEG): container finished" podID="a6001a66-8816-4976-b49e-d28ec0185d4b" containerID="28d5389266895a570ccf0981a00915c980065ca798575a3005df3d7328e18fe6" exitCode=0
Dec 05 18:10:40 crc kubenswrapper[4961]: I1205 18:10:40.757971 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7rh7t" event={"ID":"a6001a66-8816-4976-b49e-d28ec0185d4b","Type":"ContainerDied","Data":"28d5389266895a570ccf0981a00915c980065ca798575a3005df3d7328e18fe6"}
Dec 05 18:10:40 crc kubenswrapper[4961]: I1205 18:10:40.758002 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7rh7t" event={"ID":"a6001a66-8816-4976-b49e-d28ec0185d4b","Type":"ContainerDied","Data":"f5332719ef0118d84fb65cb7c71e77a9245b867a4061eea6f2d46d382b707ec7"}
Dec 05 18:10:40 crc kubenswrapper[4961]: I1205 18:10:40.758017 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f5332719ef0118d84fb65cb7c71e77a9245b867a4061eea6f2d46d382b707ec7"
Dec 05 18:10:40 crc kubenswrapper[4961]: I1205 18:10:40.817708 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7rh7t"
Dec 05 18:10:40 crc kubenswrapper[4961]: I1205 18:10:40.902171 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g6ht6\" (UniqueName: \"kubernetes.io/projected/a6001a66-8816-4976-b49e-d28ec0185d4b-kube-api-access-g6ht6\") pod \"a6001a66-8816-4976-b49e-d28ec0185d4b\" (UID: \"a6001a66-8816-4976-b49e-d28ec0185d4b\") "
Dec 05 18:10:40 crc kubenswrapper[4961]: I1205 18:10:40.902402 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6001a66-8816-4976-b49e-d28ec0185d4b-catalog-content\") pod \"a6001a66-8816-4976-b49e-d28ec0185d4b\" (UID: \"a6001a66-8816-4976-b49e-d28ec0185d4b\") "
Dec 05 18:10:40 crc kubenswrapper[4961]: I1205 18:10:40.902544 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6001a66-8816-4976-b49e-d28ec0185d4b-utilities\") pod \"a6001a66-8816-4976-b49e-d28ec0185d4b\" (UID: \"a6001a66-8816-4976-b49e-d28ec0185d4b\") "
Dec 05 18:10:40 crc kubenswrapper[4961]: I1205 18:10:40.903994 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6001a66-8816-4976-b49e-d28ec0185d4b-utilities" (OuterVolumeSpecName: "utilities") pod "a6001a66-8816-4976-b49e-d28ec0185d4b" (UID: "a6001a66-8816-4976-b49e-d28ec0185d4b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 18:10:40 crc kubenswrapper[4961]: I1205 18:10:40.907960 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6001a66-8816-4976-b49e-d28ec0185d4b-kube-api-access-g6ht6" (OuterVolumeSpecName: "kube-api-access-g6ht6") pod "a6001a66-8816-4976-b49e-d28ec0185d4b" (UID: "a6001a66-8816-4976-b49e-d28ec0185d4b"). InnerVolumeSpecName "kube-api-access-g6ht6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 18:10:40 crc kubenswrapper[4961]: I1205 18:10:40.920438 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a6001a66-8816-4976-b49e-d28ec0185d4b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a6001a66-8816-4976-b49e-d28ec0185d4b" (UID: "a6001a66-8816-4976-b49e-d28ec0185d4b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 18:10:41 crc kubenswrapper[4961]: I1205 18:10:41.004580 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a6001a66-8816-4976-b49e-d28ec0185d4b-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 18:10:41 crc kubenswrapper[4961]: I1205 18:10:41.004619 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g6ht6\" (UniqueName: \"kubernetes.io/projected/a6001a66-8816-4976-b49e-d28ec0185d4b-kube-api-access-g6ht6\") on node \"crc\" DevicePath \"\""
Dec 05 18:10:41 crc kubenswrapper[4961]: I1205 18:10:41.004630 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a6001a66-8816-4976-b49e-d28ec0185d4b-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 18:10:41 crc kubenswrapper[4961]: I1205 18:10:41.771049 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7rh7t"
Dec 05 18:10:41 crc kubenswrapper[4961]: I1205 18:10:41.833361 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7rh7t"]
Dec 05 18:10:41 crc kubenswrapper[4961]: I1205 18:10:41.845794 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7rh7t"]
Dec 05 18:10:42 crc kubenswrapper[4961]: I1205 18:10:42.879524 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6001a66-8816-4976-b49e-d28ec0185d4b" path="/var/lib/kubelet/pods/a6001a66-8816-4976-b49e-d28ec0185d4b/volumes"
Dec 05 18:10:57 crc kubenswrapper[4961]: I1205 18:10:57.245209 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 18:10:57 crc kubenswrapper[4961]: I1205 18:10:57.245886 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 18:10:57 crc kubenswrapper[4961]: I1205 18:10:57.245935 4961 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vc27"
Dec 05 18:10:57 crc kubenswrapper[4961]: I1205 18:10:57.246703 4961 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b"} pod="openshift-machine-config-operator/machine-config-daemon-4vc27" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 18:10:57 crc kubenswrapper[4961]: I1205 18:10:57.246748 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" containerID="cri-o://8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b" gracePeriod=600
Dec 05 18:10:57 crc kubenswrapper[4961]: E1205 18:10:57.376056 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:10:58 crc kubenswrapper[4961]: I1205 18:10:58.139526 4961 generic.go:334] "Generic (PLEG): container finished" podID="c048c267-061b-479b-9d63-b3aee093d9f6" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b" exitCode=0
Dec 05 18:10:58 crc kubenswrapper[4961]: I1205 18:10:58.139578 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerDied","Data":"8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b"}
Dec 05 18:10:58 crc kubenswrapper[4961]: I1205 18:10:58.139818 4961 scope.go:117] "RemoveContainer" containerID="a4dfbf02cada67058f2ed254a8aeb3929d2402207851cbed84f73238ae936112"
Dec 05 18:10:58 crc kubenswrapper[4961]: I1205 18:10:58.140414 4961 scope.go:117] "RemoveContainer" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b"
Dec 05 18:10:58 crc kubenswrapper[4961]: E1205 18:10:58.140664 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:11:13 crc kubenswrapper[4961]: I1205 18:11:13.864957 4961 scope.go:117] "RemoveContainer" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b"
Dec 05 18:11:13 crc kubenswrapper[4961]: E1205 18:11:13.865613 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:11:24 crc kubenswrapper[4961]: I1205 18:11:24.869269 4961 scope.go:117] "RemoveContainer" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b"
Dec 05 18:11:24 crc kubenswrapper[4961]: E1205 18:11:24.870184 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:11:35 crc kubenswrapper[4961]: I1205 18:11:35.864265 4961 scope.go:117] "RemoveContainer" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b"
Dec 05 18:11:35 crc kubenswrapper[4961]: E1205 18:11:35.865460 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:11:46 crc kubenswrapper[4961]: I1205 18:11:46.864351 4961 scope.go:117] "RemoveContainer" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b"
Dec 05 18:11:46 crc kubenswrapper[4961]: E1205 18:11:46.865361 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:11:57 crc kubenswrapper[4961]: I1205 18:11:57.863891 4961 scope.go:117] "RemoveContainer" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b"
Dec 05 18:11:57 crc kubenswrapper[4961]: E1205 18:11:57.864766 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:12:05 crc kubenswrapper[4961]: I1205 18:12:05.857068 4961 generic.go:334] "Generic (PLEG): container finished" podID="350a4c11-1d87-4f63-8ec8-c808de6e46b0" containerID="10905a19c4683d50ea888517c3106d8b424916c61a00c0dcd9d05db76ee77126" exitCode=0
Dec 05 18:12:05 crc kubenswrapper[4961]: I1205 18:12:05.857138 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb" event={"ID":"350a4c11-1d87-4f63-8ec8-c808de6e46b0","Type":"ContainerDied","Data":"10905a19c4683d50ea888517c3106d8b424916c61a00c0dcd9d05db76ee77126"}
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.261940 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb"
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.370766 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/350a4c11-1d87-4f63-8ec8-c808de6e46b0-libvirt-secret-0\") pod \"350a4c11-1d87-4f63-8ec8-c808de6e46b0\" (UID: \"350a4c11-1d87-4f63-8ec8-c808de6e46b0\") "
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.370860 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/350a4c11-1d87-4f63-8ec8-c808de6e46b0-inventory\") pod \"350a4c11-1d87-4f63-8ec8-c808de6e46b0\" (UID: \"350a4c11-1d87-4f63-8ec8-c808de6e46b0\") "
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.370930 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/350a4c11-1d87-4f63-8ec8-c808de6e46b0-libvirt-combined-ca-bundle\") pod \"350a4c11-1d87-4f63-8ec8-c808de6e46b0\" (UID: \"350a4c11-1d87-4f63-8ec8-c808de6e46b0\") "
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.370992 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jfrzx\" (UniqueName: \"kubernetes.io/projected/350a4c11-1d87-4f63-8ec8-c808de6e46b0-kube-api-access-jfrzx\") pod \"350a4c11-1d87-4f63-8ec8-c808de6e46b0\" (UID: \"350a4c11-1d87-4f63-8ec8-c808de6e46b0\") "
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.371115 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/350a4c11-1d87-4f63-8ec8-c808de6e46b0-ssh-key\") pod \"350a4c11-1d87-4f63-8ec8-c808de6e46b0\" (UID: \"350a4c11-1d87-4f63-8ec8-c808de6e46b0\") "
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.377372 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/350a4c11-1d87-4f63-8ec8-c808de6e46b0-libvirt-combined-ca-bundle" (OuterVolumeSpecName: "libvirt-combined-ca-bundle") pod "350a4c11-1d87-4f63-8ec8-c808de6e46b0" (UID: "350a4c11-1d87-4f63-8ec8-c808de6e46b0"). InnerVolumeSpecName "libvirt-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.380614 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/350a4c11-1d87-4f63-8ec8-c808de6e46b0-kube-api-access-jfrzx" (OuterVolumeSpecName: "kube-api-access-jfrzx") pod "350a4c11-1d87-4f63-8ec8-c808de6e46b0" (UID: "350a4c11-1d87-4f63-8ec8-c808de6e46b0"). InnerVolumeSpecName "kube-api-access-jfrzx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.398467 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/350a4c11-1d87-4f63-8ec8-c808de6e46b0-inventory" (OuterVolumeSpecName: "inventory") pod "350a4c11-1d87-4f63-8ec8-c808de6e46b0" (UID: "350a4c11-1d87-4f63-8ec8-c808de6e46b0"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.408633 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/350a4c11-1d87-4f63-8ec8-c808de6e46b0-libvirt-secret-0" (OuterVolumeSpecName: "libvirt-secret-0") pod "350a4c11-1d87-4f63-8ec8-c808de6e46b0" (UID: "350a4c11-1d87-4f63-8ec8-c808de6e46b0"). InnerVolumeSpecName "libvirt-secret-0". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.410126 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/350a4c11-1d87-4f63-8ec8-c808de6e46b0-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "350a4c11-1d87-4f63-8ec8-c808de6e46b0" (UID: "350a4c11-1d87-4f63-8ec8-c808de6e46b0"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.474295 4961 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/350a4c11-1d87-4f63-8ec8-c808de6e46b0-inventory\") on node \"crc\" DevicePath \"\""
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.474348 4961 reconciler_common.go:293] "Volume detached for volume \"libvirt-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/350a4c11-1d87-4f63-8ec8-c808de6e46b0-libvirt-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.474384 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jfrzx\" (UniqueName: \"kubernetes.io/projected/350a4c11-1d87-4f63-8ec8-c808de6e46b0-kube-api-access-jfrzx\") on node \"crc\" DevicePath \"\""
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.474410 4961 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/350a4c11-1d87-4f63-8ec8-c808de6e46b0-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.474420 4961 reconciler_common.go:293] "Volume detached for volume \"libvirt-secret-0\" (UniqueName: \"kubernetes.io/secret/350a4c11-1d87-4f63-8ec8-c808de6e46b0-libvirt-secret-0\") on node \"crc\" DevicePath \"\""
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.873085 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb" event={"ID":"350a4c11-1d87-4f63-8ec8-c808de6e46b0","Type":"ContainerDied","Data":"04e6703312a24301521723798a97d550d33c2312d2d53a9a13b9af1513b30391"}
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.873128 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="04e6703312a24301521723798a97d550d33c2312d2d53a9a13b9af1513b30391"
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.873184 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb"
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.959608 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"]
Dec 05 18:12:07 crc kubenswrapper[4961]: E1205 18:12:07.960009 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90f9ef10-2686-4315-ae98-45e93c8781e3" containerName="registry-server"
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.960025 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="90f9ef10-2686-4315-ae98-45e93c8781e3" containerName="registry-server"
Dec 05 18:12:07 crc kubenswrapper[4961]: E1205 18:12:07.960039 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffb47854-8abd-40f4-b157-ce2bd0953361" containerName="extract-utilities"
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.960045 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffb47854-8abd-40f4-b157-ce2bd0953361" containerName="extract-utilities"
Dec 05 18:12:07 crc kubenswrapper[4961]: E1205 18:12:07.960054 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffb47854-8abd-40f4-b157-ce2bd0953361" containerName="registry-server"
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.960060 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffb47854-8abd-40f4-b157-ce2bd0953361" containerName="registry-server"
Dec 05 18:12:07 crc kubenswrapper[4961]: E1205 18:12:07.960075 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="350a4c11-1d87-4f63-8ec8-c808de6e46b0" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.960082 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="350a4c11-1d87-4f63-8ec8-c808de6e46b0" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Dec 05 18:12:07 crc kubenswrapper[4961]: E1205 18:12:07.960091 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90f9ef10-2686-4315-ae98-45e93c8781e3" containerName="extract-content"
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.960097 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="90f9ef10-2686-4315-ae98-45e93c8781e3" containerName="extract-content"
Dec 05 18:12:07 crc kubenswrapper[4961]: E1205 18:12:07.960109 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6001a66-8816-4976-b49e-d28ec0185d4b" containerName="registry-server"
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.960114 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6001a66-8816-4976-b49e-d28ec0185d4b" containerName="registry-server"
Dec 05 18:12:07 crc kubenswrapper[4961]: E1205 18:12:07.960133 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffb47854-8abd-40f4-b157-ce2bd0953361" containerName="extract-content"
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.960138 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffb47854-8abd-40f4-b157-ce2bd0953361" containerName="extract-content"
Dec 05 18:12:07 crc kubenswrapper[4961]: E1205 18:12:07.960154 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="90f9ef10-2686-4315-ae98-45e93c8781e3" containerName="extract-utilities"
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.960159 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="90f9ef10-2686-4315-ae98-45e93c8781e3" containerName="extract-utilities"
Dec 05 18:12:07 crc kubenswrapper[4961]: E1205 18:12:07.960180 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6001a66-8816-4976-b49e-d28ec0185d4b" containerName="extract-content"
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.960188 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6001a66-8816-4976-b49e-d28ec0185d4b" containerName="extract-content"
Dec 05 18:12:07 crc kubenswrapper[4961]: E1205 18:12:07.960202 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6001a66-8816-4976-b49e-d28ec0185d4b" containerName="extract-utilities"
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.960209 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6001a66-8816-4976-b49e-d28ec0185d4b" containerName="extract-utilities"
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.960411 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6001a66-8816-4976-b49e-d28ec0185d4b" containerName="registry-server"
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.960428 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffb47854-8abd-40f4-b157-ce2bd0953361" containerName="registry-server"
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.960439 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="350a4c11-1d87-4f63-8ec8-c808de6e46b0" containerName="libvirt-edpm-deployment-openstack-edpm-ipam"
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.960450 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="90f9ef10-2686-4315-ae98-45e93c8781e3" containerName="registry-server"
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.961414 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.964838 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-cell1-compute-config"
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.964838 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"nova-migration-ssh-key"
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.965215 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"nova-extra-config"
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.965233 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env"
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.965385 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rhbf4"
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.965468 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret"
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.965622 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam"
Dec 05 18:12:07 crc kubenswrapper[4961]: I1205 18:12:07.971042 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"]
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.088030 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.088556 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.088594 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.088657 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cl799\" (UniqueName: \"kubernetes.io/projected/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-kube-api-access-cl799\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.088894 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.089007 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.089216 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.089349 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.089436 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.191970 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cl799\" (UniqueName: \"kubernetes.io/projected/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-kube-api-access-cl799\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.192100 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.192154 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.192246 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.192308 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.192361 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.192421 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.192478 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.192527 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.194804 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-extra-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.196321 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-migration-ssh-key-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.197862 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-cell1-compute-config-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.198425 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-inventory\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.198511 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-combined-ca-bundle\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.198856 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-ssh-key\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.201388 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-migration-ssh-key-0\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.206589 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-cell1-compute-config-1\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.208183 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cl799\" (UniqueName: \"kubernetes.io/projected/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-kube-api-access-cl799\") pod \"nova-edpm-deployment-openstack-edpm-ipam-crdt6\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.286761 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"
Dec 05 18:12:08 crc kubenswrapper[4961]: I1205 18:12:08.902685 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6"]
Dec 05 18:12:09 crc kubenswrapper[4961]: I1205 18:12:09.890994 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6" event={"ID":"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9","Type":"ContainerStarted","Data":"a0477d9f7c331e06036340fe8cccccc59e31f001a46a4d466c9c732ff56ba728"}
Dec 05 18:12:09 crc kubenswrapper[4961]: I1205 18:12:09.891286 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6" event={"ID":"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9","Type":"ContainerStarted","Data":"b1738c66b30cc3f73cfef0b2758967d0e531bef74064014d7c41009135787a17"}
Dec 05 18:12:09 crc kubenswrapper[4961]: I1205 18:12:09.917830 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6" podStartSLOduration=2.404296199 podStartE2EDuration="2.917810886s" podCreationTimestamp="2025-12-05 18:12:07 +0000 UTC" firstStartedPulling="2025-12-05 18:12:08.917226273 +0000 UTC m=+2334.978376766" lastFinishedPulling="2025-12-05 18:12:09.43074097 +0000 UTC m=+2335.491891453" observedRunningTime="2025-12-05 18:12:09.910120206 +0000 UTC m=+2335.971270689" watchObservedRunningTime="2025-12-05 18:12:09.917810886 +0000 UTC m=+2335.978961379"
Dec 05 18:12:11 crc kubenswrapper[4961]: I1205 18:12:11.863558 4961 scope.go:117] "RemoveContainer" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b"
Dec 05 18:12:11 crc kubenswrapper[4961]: E1205 18:12:11.864120 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:12:23 crc kubenswrapper[4961]: I1205 18:12:23.864804 4961 scope.go:117] "RemoveContainer" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b"
Dec 05 18:12:23 crc kubenswrapper[4961]: E1205 18:12:23.865844 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:12:37 crc kubenswrapper[4961]: I1205 18:12:37.864591 4961 scope.go:117] "RemoveContainer" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b"
Dec 05 18:12:37 crc kubenswrapper[4961]: E1205 18:12:37.865859 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:12:48 crc kubenswrapper[4961]: I1205 18:12:48.864063 4961 scope.go:117] "RemoveContainer" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b"
Dec 05 18:12:48 crc kubenswrapper[4961]: E1205 18:12:48.864829 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:13:03 crc kubenswrapper[4961]: I1205 18:13:03.864286 4961 scope.go:117] "RemoveContainer" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b"
Dec 05 18:13:03 crc kubenswrapper[4961]: E1205 18:13:03.865026 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:13:18 crc kubenswrapper[4961]: I1205 18:13:18.863460 4961 scope.go:117] "RemoveContainer" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b"
Dec 05 18:13:18 crc kubenswrapper[4961]: E1205 18:13:18.864145 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:13:32 crc kubenswrapper[4961]: I1205 18:13:32.864721 4961 scope.go:117] "RemoveContainer" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b"
Dec 05 18:13:32 crc kubenswrapper[4961]: E1205 18:13:32.865560 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:13:45 crc kubenswrapper[4961]: I1205 18:13:45.863910 4961 scope.go:117] "RemoveContainer" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b"
Dec 05 18:13:45 crc kubenswrapper[4961]: E1205 18:13:45.865118 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:14:00 crc kubenswrapper[4961]: I1205 18:14:00.864616 4961 scope.go:117] "RemoveContainer" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b"
Dec 05 18:14:00 crc kubenswrapper[4961]: E1205 18:14:00.865541 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:14:12 crc kubenswrapper[4961]: I1205 18:14:12.863689 4961 scope.go:117] "RemoveContainer" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b"
Dec 05 18:14:12 crc kubenswrapper[4961]: E1205 18:14:12.864459 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:14:25 crc kubenswrapper[4961]: I1205 18:14:25.863667 4961 scope.go:117] "RemoveContainer" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b"
Dec 05 18:14:25 crc kubenswrapper[4961]: E1205 18:14:25.864599 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:14:39 crc kubenswrapper[4961]: I1205 18:14:39.863880 4961 scope.go:117] "RemoveContainer" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b"
Dec 05 18:14:39 crc kubenswrapper[4961]: E1205 18:14:39.865065 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:14:51 crc kubenswrapper[4961]: I1205 18:14:51.864059 4961 scope.go:117] "RemoveContainer" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b"
Dec 05 18:14:51 crc kubenswrapper[4961]: E1205 18:14:51.864847 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:15:00 crc kubenswrapper[4961]: I1205 18:15:00.150879 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415975-mrcwz"]
Dec 05 18:15:00 crc kubenswrapper[4961]: I1205 18:15:00.153312 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-mrcwz"
Dec 05 18:15:00 crc kubenswrapper[4961]: I1205 18:15:00.155769 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 05 18:15:00 crc kubenswrapper[4961]: I1205 18:15:00.155939 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 05 18:15:00 crc kubenswrapper[4961]: I1205 18:15:00.161703 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415975-mrcwz"]
Dec 05 18:15:00 crc kubenswrapper[4961]: I1205 18:15:00.252754 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/34accc06-9ea6-42d4-abe1-284ece277812-config-volume\") pod \"collect-profiles-29415975-mrcwz\" (UID: \"34accc06-9ea6-42d4-abe1-284ece277812\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-mrcwz"
Dec 05 18:15:00 crc kubenswrapper[4961]: I1205 18:15:00.252902 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-25l7n\" (UniqueName: \"kubernetes.io/projected/34accc06-9ea6-42d4-abe1-284ece277812-kube-api-access-25l7n\") pod \"collect-profiles-29415975-mrcwz\" (UID: \"34accc06-9ea6-42d4-abe1-284ece277812\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-mrcwz"
Dec 05 18:15:00 crc kubenswrapper[4961]: I1205 18:15:00.253012 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/34accc06-9ea6-42d4-abe1-284ece277812-secret-volume\") pod \"collect-profiles-29415975-mrcwz\" (UID: \"34accc06-9ea6-42d4-abe1-284ece277812\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-mrcwz"
Dec 05 18:15:00 crc kubenswrapper[4961]: I1205 18:15:00.354508 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/34accc06-9ea6-42d4-abe1-284ece277812-config-volume\") pod \"collect-profiles-29415975-mrcwz\" (UID: \"34accc06-9ea6-42d4-abe1-284ece277812\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-mrcwz"
Dec 05 18:15:00 crc kubenswrapper[4961]: I1205 18:15:00.354611 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-25l7n\" (UniqueName: \"kubernetes.io/projected/34accc06-9ea6-42d4-abe1-284ece277812-kube-api-access-25l7n\") pod \"collect-profiles-29415975-mrcwz\" (UID: \"34accc06-9ea6-42d4-abe1-284ece277812\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-mrcwz"
Dec 05 18:15:00 crc kubenswrapper[4961]: I1205 18:15:00.354657 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/34accc06-9ea6-42d4-abe1-284ece277812-secret-volume\") pod \"collect-profiles-29415975-mrcwz\" (UID: \"34accc06-9ea6-42d4-abe1-284ece277812\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-mrcwz"
Dec 05 18:15:00 crc kubenswrapper[4961]: I1205 18:15:00.355458 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/34accc06-9ea6-42d4-abe1-284ece277812-config-volume\") pod \"collect-profiles-29415975-mrcwz\" (UID: \"34accc06-9ea6-42d4-abe1-284ece277812\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-mrcwz"
Dec 05 18:15:00 crc kubenswrapper[4961]: I1205 18:15:00.361299 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/34accc06-9ea6-42d4-abe1-284ece277812-secret-volume\") pod \"collect-profiles-29415975-mrcwz\" (UID: \"34accc06-9ea6-42d4-abe1-284ece277812\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-mrcwz"
Dec 05 18:15:00 crc kubenswrapper[4961]: I1205 18:15:00.383399 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-25l7n\" (UniqueName: \"kubernetes.io/projected/34accc06-9ea6-42d4-abe1-284ece277812-kube-api-access-25l7n\") pod \"collect-profiles-29415975-mrcwz\" (UID: \"34accc06-9ea6-42d4-abe1-284ece277812\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-mrcwz"
Dec 05 18:15:00 crc kubenswrapper[4961]: I1205 18:15:00.478256 4961 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-mrcwz" Dec 05 18:15:00 crc kubenswrapper[4961]: I1205 18:15:00.927313 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415975-mrcwz"] Dec 05 18:15:01 crc kubenswrapper[4961]: I1205 18:15:01.650657 4961 generic.go:334] "Generic (PLEG): container finished" podID="34accc06-9ea6-42d4-abe1-284ece277812" containerID="50d581a783cf4976997f390c76b298b5b8ab5323a1d67bb009e6709daa23a847" exitCode=0 Dec 05 18:15:01 crc kubenswrapper[4961]: I1205 18:15:01.651072 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-mrcwz" event={"ID":"34accc06-9ea6-42d4-abe1-284ece277812","Type":"ContainerDied","Data":"50d581a783cf4976997f390c76b298b5b8ab5323a1d67bb009e6709daa23a847"} Dec 05 18:15:01 crc kubenswrapper[4961]: I1205 18:15:01.651106 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-mrcwz" event={"ID":"34accc06-9ea6-42d4-abe1-284ece277812","Type":"ContainerStarted","Data":"dd38fe080318bcf5561d0ce17f627f86f60b821c35533436f810ee552800f55a"} Dec 05 18:15:03 crc kubenswrapper[4961]: I1205 18:15:03.028227 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-mrcwz" Dec 05 18:15:03 crc kubenswrapper[4961]: I1205 18:15:03.211697 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/34accc06-9ea6-42d4-abe1-284ece277812-secret-volume\") pod \"34accc06-9ea6-42d4-abe1-284ece277812\" (UID: \"34accc06-9ea6-42d4-abe1-284ece277812\") " Dec 05 18:15:03 crc kubenswrapper[4961]: I1205 18:15:03.211938 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-25l7n\" (UniqueName: \"kubernetes.io/projected/34accc06-9ea6-42d4-abe1-284ece277812-kube-api-access-25l7n\") pod \"34accc06-9ea6-42d4-abe1-284ece277812\" (UID: \"34accc06-9ea6-42d4-abe1-284ece277812\") " Dec 05 18:15:03 crc kubenswrapper[4961]: I1205 18:15:03.212059 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/34accc06-9ea6-42d4-abe1-284ece277812-config-volume\") pod \"34accc06-9ea6-42d4-abe1-284ece277812\" (UID: \"34accc06-9ea6-42d4-abe1-284ece277812\") " Dec 05 18:15:03 crc kubenswrapper[4961]: I1205 18:15:03.213096 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/34accc06-9ea6-42d4-abe1-284ece277812-config-volume" (OuterVolumeSpecName: "config-volume") pod "34accc06-9ea6-42d4-abe1-284ece277812" (UID: "34accc06-9ea6-42d4-abe1-284ece277812"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 18:15:03 crc kubenswrapper[4961]: I1205 18:15:03.219020 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/34accc06-9ea6-42d4-abe1-284ece277812-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "34accc06-9ea6-42d4-abe1-284ece277812" (UID: "34accc06-9ea6-42d4-abe1-284ece277812"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:15:03 crc kubenswrapper[4961]: I1205 18:15:03.223952 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/34accc06-9ea6-42d4-abe1-284ece277812-kube-api-access-25l7n" (OuterVolumeSpecName: "kube-api-access-25l7n") pod "34accc06-9ea6-42d4-abe1-284ece277812" (UID: "34accc06-9ea6-42d4-abe1-284ece277812"). InnerVolumeSpecName "kube-api-access-25l7n". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:15:03 crc kubenswrapper[4961]: I1205 18:15:03.314927 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-25l7n\" (UniqueName: \"kubernetes.io/projected/34accc06-9ea6-42d4-abe1-284ece277812-kube-api-access-25l7n\") on node \"crc\" DevicePath \"\"" Dec 05 18:15:03 crc kubenswrapper[4961]: I1205 18:15:03.314980 4961 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/34accc06-9ea6-42d4-abe1-284ece277812-config-volume\") on node \"crc\" DevicePath \"\"" Dec 05 18:15:03 crc kubenswrapper[4961]: I1205 18:15:03.314998 4961 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/34accc06-9ea6-42d4-abe1-284ece277812-secret-volume\") on node \"crc\" DevicePath \"\"" Dec 05 18:15:03 crc kubenswrapper[4961]: I1205 18:15:03.673363 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-mrcwz" event={"ID":"34accc06-9ea6-42d4-abe1-284ece277812","Type":"ContainerDied","Data":"dd38fe080318bcf5561d0ce17f627f86f60b821c35533436f810ee552800f55a"} Dec 05 18:15:03 crc kubenswrapper[4961]: I1205 18:15:03.673406 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dd38fe080318bcf5561d0ce17f627f86f60b821c35533436f810ee552800f55a" Dec 05 18:15:03 crc kubenswrapper[4961]: I1205 18:15:03.673436 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415975-mrcwz" Dec 05 18:15:03 crc kubenswrapper[4961]: I1205 18:15:03.864370 4961 scope.go:117] "RemoveContainer" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b" Dec 05 18:15:03 crc kubenswrapper[4961]: E1205 18:15:03.864856 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:15:04 crc kubenswrapper[4961]: I1205 18:15:04.109232 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415930-s2vbd"] Dec 05 18:15:04 crc kubenswrapper[4961]: I1205 18:15:04.116908 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415930-s2vbd"] Dec 05 18:15:04 crc kubenswrapper[4961]: I1205 18:15:04.883733 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="45c41860-47e0-4fc6-b9e2-73308ab35bfe" path="/var/lib/kubelet/pods/45c41860-47e0-4fc6-b9e2-73308ab35bfe/volumes" Dec 05 18:15:07 crc kubenswrapper[4961]: I1205 18:15:07.722619 4961 generic.go:334] "Generic (PLEG): container finished" podID="19f9b2f9-7ecf-4676-bd98-c3c4615d12c9" containerID="a0477d9f7c331e06036340fe8cccccc59e31f001a46a4d466c9c732ff56ba728" exitCode=0 Dec 05 18:15:07 crc kubenswrapper[4961]: I1205 18:15:07.722967 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6" event={"ID":"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9","Type":"ContainerDied","Data":"a0477d9f7c331e06036340fe8cccccc59e31f001a46a4d466c9c732ff56ba728"} Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.153033 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.329731 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-combined-ca-bundle\") pod \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.330147 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-cell1-compute-config-0\") pod \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.330218 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-inventory\") pod \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.330260 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-migration-ssh-key-0\") pod \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.330316 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-extra-config-0\") pod \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.330368 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-ssh-key\") pod \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.330486 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-migration-ssh-key-1\") pod \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.330538 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cl799\" (UniqueName: \"kubernetes.io/projected/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-kube-api-access-cl799\") pod \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.330563 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-cell1-compute-config-1\") pod \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\" (UID: \"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9\") " Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.336164 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume 
"kubernetes.io/projected/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-kube-api-access-cl799" (OuterVolumeSpecName: "kube-api-access-cl799") pod "19f9b2f9-7ecf-4676-bd98-c3c4615d12c9" (UID: "19f9b2f9-7ecf-4676-bd98-c3c4615d12c9"). InnerVolumeSpecName "kube-api-access-cl799". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.336699 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-combined-ca-bundle" (OuterVolumeSpecName: "nova-combined-ca-bundle") pod "19f9b2f9-7ecf-4676-bd98-c3c4615d12c9" (UID: "19f9b2f9-7ecf-4676-bd98-c3c4615d12c9"). InnerVolumeSpecName "nova-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.359102 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-cell1-compute-config-1" (OuterVolumeSpecName: "nova-cell1-compute-config-1") pod "19f9b2f9-7ecf-4676-bd98-c3c4615d12c9" (UID: "19f9b2f9-7ecf-4676-bd98-c3c4615d12c9"). InnerVolumeSpecName "nova-cell1-compute-config-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.364214 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-extra-config-0" (OuterVolumeSpecName: "nova-extra-config-0") pod "19f9b2f9-7ecf-4676-bd98-c3c4615d12c9" (UID: "19f9b2f9-7ecf-4676-bd98-c3c4615d12c9"). InnerVolumeSpecName "nova-extra-config-0". PluginName "kubernetes.io/configmap", VolumeGidValue "" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.364505 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-cell1-compute-config-0" (OuterVolumeSpecName: "nova-cell1-compute-config-0") pod "19f9b2f9-7ecf-4676-bd98-c3c4615d12c9" (UID: "19f9b2f9-7ecf-4676-bd98-c3c4615d12c9"). InnerVolumeSpecName "nova-cell1-compute-config-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.371700 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-migration-ssh-key-1" (OuterVolumeSpecName: "nova-migration-ssh-key-1") pod "19f9b2f9-7ecf-4676-bd98-c3c4615d12c9" (UID: "19f9b2f9-7ecf-4676-bd98-c3c4615d12c9"). InnerVolumeSpecName "nova-migration-ssh-key-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.378446 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-inventory" (OuterVolumeSpecName: "inventory") pod "19f9b2f9-7ecf-4676-bd98-c3c4615d12c9" (UID: "19f9b2f9-7ecf-4676-bd98-c3c4615d12c9"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.391061 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "19f9b2f9-7ecf-4676-bd98-c3c4615d12c9" (UID: "19f9b2f9-7ecf-4676-bd98-c3c4615d12c9"). InnerVolumeSpecName "ssh-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.392141 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-migration-ssh-key-0" (OuterVolumeSpecName: "nova-migration-ssh-key-0") pod "19f9b2f9-7ecf-4676-bd98-c3c4615d12c9" (UID: "19f9b2f9-7ecf-4676-bd98-c3c4615d12c9"). InnerVolumeSpecName "nova-migration-ssh-key-0". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.434603 4961 reconciler_common.go:293] "Volume detached for volume \"nova-extra-config-0\" (UniqueName: \"kubernetes.io/configmap/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-extra-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.434639 4961 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.434652 4961 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-1\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-migration-ssh-key-1\") on node \"crc\" DevicePath \"\"" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.434666 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cl799\" (UniqueName: \"kubernetes.io/projected/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-kube-api-access-cl799\") on node \"crc\" DevicePath \"\"" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.434678 4961 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-1\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-cell1-compute-config-1\") on node \"crc\" DevicePath \"\"" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.434690 4961 reconciler_common.go:293] "Volume detached for volume \"nova-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.434702 4961 reconciler_common.go:293] "Volume detached for volume \"nova-cell1-compute-config-0\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-cell1-compute-config-0\") on node \"crc\" DevicePath \"\"" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.434714 4961 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.434726 4961 reconciler_common.go:293] "Volume detached for volume \"nova-migration-ssh-key-0\" (UniqueName: \"kubernetes.io/secret/19f9b2f9-7ecf-4676-bd98-c3c4615d12c9-nova-migration-ssh-key-0\") on node \"crc\" DevicePath \"\"" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.746757 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6" event={"ID":"19f9b2f9-7ecf-4676-bd98-c3c4615d12c9","Type":"ContainerDied","Data":"b1738c66b30cc3f73cfef0b2758967d0e531bef74064014d7c41009135787a17"} Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.746828 4961 pod_container_deletor.go:80] "Container not found in pod's containers" 
containerID="b1738c66b30cc3f73cfef0b2758967d0e531bef74064014d7c41009135787a17" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.746856 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/nova-edpm-deployment-openstack-edpm-ipam-crdt6" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.853566 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq"] Dec 05 18:15:09 crc kubenswrapper[4961]: E1205 18:15:09.853962 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="34accc06-9ea6-42d4-abe1-284ece277812" containerName="collect-profiles" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.853976 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="34accc06-9ea6-42d4-abe1-284ece277812" containerName="collect-profiles" Dec 05 18:15:09 crc kubenswrapper[4961]: E1205 18:15:09.854006 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="19f9b2f9-7ecf-4676-bd98-c3c4615d12c9" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.854012 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="19f9b2f9-7ecf-4676-bd98-c3c4615d12c9" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.854178 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="19f9b2f9-7ecf-4676-bd98-c3c4615d12c9" containerName="nova-edpm-deployment-openstack-edpm-ipam" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.854200 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="34accc06-9ea6-42d4-abe1-284ece277812" containerName="collect-profiles" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.854794 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.856735 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"ceilometer-compute-config-data" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.856756 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"openstack-aee-default-env" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.857091 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplane-ansible-ssh-private-key-secret" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.857113 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"openstack-edpm-ipam-dockercfg-rhbf4" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.857136 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"dataplanenodeset-openstack-edpm-ipam" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.865369 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.865412 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7xd2\" (UniqueName: \"kubernetes.io/projected/96c8a8f6-8c0a-4c00-b80e-719556036c4e-kube-api-access-j7xd2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.865460 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.865495 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.865583 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.865631 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: 
\"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.865656 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.878734 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq"] Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.967101 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.967154 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7xd2\" (UniqueName: \"kubernetes.io/projected/96c8a8f6-8c0a-4c00-b80e-719556036c4e-kube-api-access-j7xd2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.967211 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.967247 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.967336 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.967393 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " 
pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.967418 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.974171 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-ceilometer-compute-config-data-0\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.974560 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-telemetry-combined-ca-bundle\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.977250 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-ssh-key\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.977620 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-ceilometer-compute-config-data-1\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.980369 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-inventory\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" Dec 05 18:15:09 crc kubenswrapper[4961]: I1205 18:15:09.993319 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-ceilometer-compute-config-data-2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" Dec 05 18:15:10 crc kubenswrapper[4961]: I1205 18:15:10.008685 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7xd2\" (UniqueName: \"kubernetes.io/projected/96c8a8f6-8c0a-4c00-b80e-719556036c4e-kube-api-access-j7xd2\") pod \"telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq\" (UID: 
\"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" Dec 05 18:15:10 crc kubenswrapper[4961]: I1205 18:15:10.173555 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" Dec 05 18:15:10 crc kubenswrapper[4961]: I1205 18:15:10.722801 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq"] Dec 05 18:15:10 crc kubenswrapper[4961]: I1205 18:15:10.739221 4961 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 18:15:10 crc kubenswrapper[4961]: I1205 18:15:10.759708 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" event={"ID":"96c8a8f6-8c0a-4c00-b80e-719556036c4e","Type":"ContainerStarted","Data":"5371487421dd9e9b604dff3f1f38fd703a1b06cd00ea79ee798bf9777cfe9b6b"} Dec 05 18:15:11 crc kubenswrapper[4961]: I1205 18:15:11.773969 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" event={"ID":"96c8a8f6-8c0a-4c00-b80e-719556036c4e","Type":"ContainerStarted","Data":"abeb6ecb323684ff77538585d630e0673a356b07808383ff085e8e969b770ca1"} Dec 05 18:15:16 crc kubenswrapper[4961]: I1205 18:15:16.863537 4961 scope.go:117] "RemoveContainer" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b" Dec 05 18:15:16 crc kubenswrapper[4961]: E1205 18:15:16.864513 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:15:29 crc kubenswrapper[4961]: I1205 18:15:29.863799 4961 scope.go:117] "RemoveContainer" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b" Dec 05 18:15:29 crc kubenswrapper[4961]: E1205 18:15:29.864913 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:15:43 crc kubenswrapper[4961]: I1205 18:15:43.864077 4961 scope.go:117] "RemoveContainer" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b" Dec 05 18:15:43 crc kubenswrapper[4961]: E1205 18:15:43.864805 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:15:44 crc kubenswrapper[4961]: I1205 18:15:44.609148 4961 scope.go:117] "RemoveContainer" containerID="cf9784d6224eedf65690b56c4a042d26ed4e7e57ec423e5cf090da1749306ba3" Dec 05 
18:15:56 crc kubenswrapper[4961]: I1205 18:15:56.864061 4961 scope.go:117] "RemoveContainer" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b" Dec 05 18:15:56 crc kubenswrapper[4961]: E1205 18:15:56.864970 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:16:11 crc kubenswrapper[4961]: I1205 18:16:11.864440 4961 scope.go:117] "RemoveContainer" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b" Dec 05 18:16:12 crc kubenswrapper[4961]: I1205 18:16:12.443652 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerStarted","Data":"af41fc6fc971c654acf93e33e1118a9716324f025cf05bd923d78a94ca7abd2c"} Dec 05 18:16:12 crc kubenswrapper[4961]: I1205 18:16:12.483899 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" podStartSLOduration=62.985018095 podStartE2EDuration="1m3.48386831s" podCreationTimestamp="2025-12-05 18:15:09 +0000 UTC" firstStartedPulling="2025-12-05 18:15:10.738624144 +0000 UTC m=+2516.799774627" lastFinishedPulling="2025-12-05 18:15:11.237474359 +0000 UTC m=+2517.298624842" observedRunningTime="2025-12-05 18:15:11.801361969 +0000 UTC m=+2517.862512452" watchObservedRunningTime="2025-12-05 18:16:12.48386831 +0000 UTC m=+2578.545018823" Dec 05 18:16:44 crc kubenswrapper[4961]: I1205 18:16:44.705081 4961 scope.go:117] "RemoveContainer" containerID="1d6e054ea34e4e99848a3d3d6e887da704a20613cb9f677db8587b58f949f2a5" Dec 05 18:16:44 crc kubenswrapper[4961]: I1205 18:16:44.739241 4961 scope.go:117] "RemoveContainer" containerID="bcc748971e14c66d9568b4a5b2dbc45f7a1bd077eeb5869ea0270a343c300fe1" Dec 05 18:16:44 crc kubenswrapper[4961]: I1205 18:16:44.786551 4961 scope.go:117] "RemoveContainer" containerID="28d5389266895a570ccf0981a00915c980065ca798575a3005df3d7328e18fe6" Dec 05 18:17:43 crc kubenswrapper[4961]: I1205 18:17:43.430025 4961 generic.go:334] "Generic (PLEG): container finished" podID="96c8a8f6-8c0a-4c00-b80e-719556036c4e" containerID="abeb6ecb323684ff77538585d630e0673a356b07808383ff085e8e969b770ca1" exitCode=0 Dec 05 18:17:43 crc kubenswrapper[4961]: I1205 18:17:43.430874 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" event={"ID":"96c8a8f6-8c0a-4c00-b80e-719556036c4e","Type":"ContainerDied","Data":"abeb6ecb323684ff77538585d630e0673a356b07808383ff085e8e969b770ca1"} Dec 05 18:17:44 crc kubenswrapper[4961]: I1205 18:17:44.923315 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" Dec 05 18:17:45 crc kubenswrapper[4961]: I1205 18:17:45.093040 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-inventory\") pod \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " Dec 05 18:17:45 crc kubenswrapper[4961]: I1205 18:17:45.093445 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-ceilometer-compute-config-data-2\") pod \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " Dec 05 18:17:45 crc kubenswrapper[4961]: I1205 18:17:45.093499 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-telemetry-combined-ca-bundle\") pod \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " Dec 05 18:17:45 crc kubenswrapper[4961]: I1205 18:17:45.093552 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7xd2\" (UniqueName: \"kubernetes.io/projected/96c8a8f6-8c0a-4c00-b80e-719556036c4e-kube-api-access-j7xd2\") pod \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " Dec 05 18:17:45 crc kubenswrapper[4961]: I1205 18:17:45.093641 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-ceilometer-compute-config-data-1\") pod \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " Dec 05 18:17:45 crc kubenswrapper[4961]: I1205 18:17:45.093685 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-ceilometer-compute-config-data-0\") pod \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " Dec 05 18:17:45 crc kubenswrapper[4961]: I1205 18:17:45.093730 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-ssh-key\") pod \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\" (UID: \"96c8a8f6-8c0a-4c00-b80e-719556036c4e\") " Dec 05 18:17:45 crc kubenswrapper[4961]: I1205 18:17:45.109396 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-telemetry-combined-ca-bundle" (OuterVolumeSpecName: "telemetry-combined-ca-bundle") pod "96c8a8f6-8c0a-4c00-b80e-719556036c4e" (UID: "96c8a8f6-8c0a-4c00-b80e-719556036c4e"). InnerVolumeSpecName "telemetry-combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:17:45 crc kubenswrapper[4961]: I1205 18:17:45.109432 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96c8a8f6-8c0a-4c00-b80e-719556036c4e-kube-api-access-j7xd2" (OuterVolumeSpecName: "kube-api-access-j7xd2") pod "96c8a8f6-8c0a-4c00-b80e-719556036c4e" (UID: "96c8a8f6-8c0a-4c00-b80e-719556036c4e"). 
InnerVolumeSpecName "kube-api-access-j7xd2". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:17:45 crc kubenswrapper[4961]: I1205 18:17:45.127000 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-ceilometer-compute-config-data-1" (OuterVolumeSpecName: "ceilometer-compute-config-data-1") pod "96c8a8f6-8c0a-4c00-b80e-719556036c4e" (UID: "96c8a8f6-8c0a-4c00-b80e-719556036c4e"). InnerVolumeSpecName "ceilometer-compute-config-data-1". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:17:45 crc kubenswrapper[4961]: I1205 18:17:45.130188 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "96c8a8f6-8c0a-4c00-b80e-719556036c4e" (UID: "96c8a8f6-8c0a-4c00-b80e-719556036c4e"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:17:45 crc kubenswrapper[4961]: I1205 18:17:45.134033 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-inventory" (OuterVolumeSpecName: "inventory") pod "96c8a8f6-8c0a-4c00-b80e-719556036c4e" (UID: "96c8a8f6-8c0a-4c00-b80e-719556036c4e"). InnerVolumeSpecName "inventory". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:17:45 crc kubenswrapper[4961]: I1205 18:17:45.141934 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-ceilometer-compute-config-data-2" (OuterVolumeSpecName: "ceilometer-compute-config-data-2") pod "96c8a8f6-8c0a-4c00-b80e-719556036c4e" (UID: "96c8a8f6-8c0a-4c00-b80e-719556036c4e"). InnerVolumeSpecName "ceilometer-compute-config-data-2". PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:17:45 crc kubenswrapper[4961]: I1205 18:17:45.150707 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-ceilometer-compute-config-data-0" (OuterVolumeSpecName: "ceilometer-compute-config-data-0") pod "96c8a8f6-8c0a-4c00-b80e-719556036c4e" (UID: "96c8a8f6-8c0a-4c00-b80e-719556036c4e"). InnerVolumeSpecName "ceilometer-compute-config-data-0". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Dec 05 18:17:45 crc kubenswrapper[4961]: I1205 18:17:45.196281 4961 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-2\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-ceilometer-compute-config-data-2\") on node \"crc\" DevicePath \"\"" Dec 05 18:17:45 crc kubenswrapper[4961]: I1205 18:17:45.196324 4961 reconciler_common.go:293] "Volume detached for volume \"telemetry-combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-telemetry-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Dec 05 18:17:45 crc kubenswrapper[4961]: I1205 18:17:45.196334 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7xd2\" (UniqueName: \"kubernetes.io/projected/96c8a8f6-8c0a-4c00-b80e-719556036c4e-kube-api-access-j7xd2\") on node \"crc\" DevicePath \"\"" Dec 05 18:17:45 crc kubenswrapper[4961]: I1205 18:17:45.196343 4961 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-1\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-ceilometer-compute-config-data-1\") on node \"crc\" DevicePath \"\"" Dec 05 18:17:45 crc kubenswrapper[4961]: I1205 18:17:45.196352 4961 reconciler_common.go:293] "Volume detached for volume \"ceilometer-compute-config-data-0\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-ceilometer-compute-config-data-0\") on node \"crc\" DevicePath \"\"" Dec 05 18:17:45 crc kubenswrapper[4961]: I1205 18:17:45.196361 4961 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-ssh-key\") on node \"crc\" DevicePath \"\"" Dec 05 18:17:45 crc kubenswrapper[4961]: I1205 18:17:45.196370 4961 reconciler_common.go:293] "Volume detached for volume \"inventory\" (UniqueName: \"kubernetes.io/secret/96c8a8f6-8c0a-4c00-b80e-719556036c4e-inventory\") on node \"crc\" DevicePath \"\"" Dec 05 18:17:45 crc kubenswrapper[4961]: I1205 18:17:45.456511 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" event={"ID":"96c8a8f6-8c0a-4c00-b80e-719556036c4e","Type":"ContainerDied","Data":"5371487421dd9e9b604dff3f1f38fd703a1b06cd00ea79ee798bf9777cfe9b6b"} Dec 05 18:17:45 crc kubenswrapper[4961]: I1205 18:17:45.456572 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="5371487421dd9e9b604dff3f1f38fd703a1b06cd00ea79ee798bf9777cfe9b6b" Dec 05 18:17:45 crc kubenswrapper[4961]: I1205 18:17:45.456605 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack/telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq" Dec 05 18:18:27 crc kubenswrapper[4961]: I1205 18:18:27.246211 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 18:18:27 crc kubenswrapper[4961]: I1205 18:18:27.246832 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.020112 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 18:18:37 crc kubenswrapper[4961]: E1205 18:18:37.021222 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="96c8a8f6-8c0a-4c00-b80e-719556036c4e" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.021243 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="96c8a8f6-8c0a-4c00-b80e-719556036c4e" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.021463 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="96c8a8f6-8c0a-4c00-b80e-719556036c4e" containerName="telemetry-edpm-deployment-openstack-edpm-ipam" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.022473 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.032716 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.071872 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.072222 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-m9vdq" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.072474 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-custom-data-s0" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.072593 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"test-operator-controller-priv-key" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.082575 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/61480a22-be11-4fc5-83fa-8736dec80842-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") " pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.082934 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/61480a22-be11-4fc5-83fa-8736dec80842-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") " pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.082984 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/61480a22-be11-4fc5-83fa-8736dec80842-config-data\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") " pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.185062 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/61480a22-be11-4fc5-83fa-8736dec80842-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") " pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.185140 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/61480a22-be11-4fc5-83fa-8736dec80842-config-data\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") " pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.185169 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/61480a22-be11-4fc5-83fa-8736dec80842-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") " pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.185236 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: 
\"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") " pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.185275 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61480a22-be11-4fc5-83fa-8736dec80842-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") " pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.185319 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7crlt\" (UniqueName: \"kubernetes.io/projected/61480a22-be11-4fc5-83fa-8736dec80842-kube-api-access-7crlt\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") " pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.185359 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/61480a22-be11-4fc5-83fa-8736dec80842-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") " pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.185402 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/61480a22-be11-4fc5-83fa-8736dec80842-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") " pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.185427 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/61480a22-be11-4fc5-83fa-8736dec80842-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") " pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.186413 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/61480a22-be11-4fc5-83fa-8736dec80842-openstack-config\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") " pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.188026 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/61480a22-be11-4fc5-83fa-8736dec80842-config-data\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") " pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.193112 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/61480a22-be11-4fc5-83fa-8736dec80842-openstack-config-secret\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") " pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.288222 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: 
\"61480a22-be11-4fc5-83fa-8736dec80842\") " pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.288314 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61480a22-be11-4fc5-83fa-8736dec80842-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") " pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.288382 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7crlt\" (UniqueName: \"kubernetes.io/projected/61480a22-be11-4fc5-83fa-8736dec80842-kube-api-access-7crlt\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") " pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.288530 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/61480a22-be11-4fc5-83fa-8736dec80842-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") " pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.288554 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/61480a22-be11-4fc5-83fa-8736dec80842-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") " pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.288638 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/61480a22-be11-4fc5-83fa-8736dec80842-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") " pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.288808 4961 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.289493 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/61480a22-be11-4fc5-83fa-8736dec80842-test-operator-ephemeral-workdir\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") " pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.289835 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/61480a22-be11-4fc5-83fa-8736dec80842-test-operator-ephemeral-temporary\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") " pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.292062 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61480a22-be11-4fc5-83fa-8736dec80842-ssh-key\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") " 
pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.299592 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/61480a22-be11-4fc5-83fa-8736dec80842-ca-certs\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") " pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.311521 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7crlt\" (UniqueName: \"kubernetes.io/projected/61480a22-be11-4fc5-83fa-8736dec80842-kube-api-access-7crlt\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") " pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.325417 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"tempest-tests-tempest\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") " pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.401832 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest" Dec 05 18:18:37 crc kubenswrapper[4961]: I1205 18:18:37.872197 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/tempest-tests-tempest"] Dec 05 18:18:37 crc kubenswrapper[4961]: W1205 18:18:37.874337 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod61480a22_be11_4fc5_83fa_8736dec80842.slice/crio-e67616128f81e7650da534d02bc1c2bed596efad116c3db74d2832d1ee7ca95d WatchSource:0}: Error finding container e67616128f81e7650da534d02bc1c2bed596efad116c3db74d2832d1ee7ca95d: Status 404 returned error can't find the container with id e67616128f81e7650da534d02bc1c2bed596efad116c3db74d2832d1ee7ca95d Dec 05 18:18:38 crc kubenswrapper[4961]: I1205 18:18:38.008188 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"61480a22-be11-4fc5-83fa-8736dec80842","Type":"ContainerStarted","Data":"e67616128f81e7650da534d02bc1c2bed596efad116c3db74d2832d1ee7ca95d"} Dec 05 18:18:57 crc kubenswrapper[4961]: I1205 18:18:57.246016 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 18:18:57 crc kubenswrapper[4961]: I1205 18:18:57.246606 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 18:19:12 crc kubenswrapper[4961]: E1205 18:19:12.498142 4961 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified" Dec 05 18:19:12 crc kubenswrapper[4961]: E1205 18:19:12.498830 4961 kuberuntime_manager.go:1274] "Unhandled Error" err="container 
&Container{Name:tempest-tests-tempest-tests-runner,Image:quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:false,MountPath:/etc/test_operator,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-workdir,ReadOnly:false,MountPath:/var/lib/tempest,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-ephemeral-temporary,ReadOnly:false,MountPath:/tmp,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:test-operator-logs,ReadOnly:false,MountPath:/var/lib/tempest/external_files,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/etc/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config,ReadOnly:true,MountPath:/var/lib/tempest/.config/openstack/clouds.yaml,SubPath:clouds.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:openstack-config-secret,ReadOnly:false,MountPath:/etc/openstack/secure.yaml,SubPath:secure.yaml,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ca-certs,ReadOnly:true,MountPath:/etc/pki/ca-trust/extracted/pem/tls-ca-bundle.pem,SubPath:tls-ca-bundle.pem,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:ssh-key,ReadOnly:false,MountPath:/var/lib/tempest/id_ecdsa,SubPath:ssh_key,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7crlt,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42480,RunAsNonRoot:*false,ReadOnlyRootFilesystem:*false,AllowPrivilegeEscalation:*true,RunAsGroup:*42480,ProcMount:nil,WindowsOptions:nil,SeccompProfile:&SeccompProfile{Type:RuntimeDefault,LocalhostProfile:nil,},AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-custom-data-s0,},Optional:nil,},SecretRef:nil,},EnvFromSource{Prefix:,ConfigMapRef:&ConfigMapEnvSource{LocalObjectReference:LocalObjectReference{Name:tempest-tests-tempest-env-vars-s0,},Optional:nil,},SecretRef:nil,},},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod tempest-tests-tempest_openstack(61480a22-be11-4fc5-83fa-8736dec80842): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Dec 05 18:19:12 crc kubenswrapper[4961]: E1205 18:19:12.500302 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="openstack/tempest-tests-tempest" 
podUID="61480a22-be11-4fc5-83fa-8736dec80842" Dec 05 18:19:13 crc kubenswrapper[4961]: E1205 18:19:13.360661 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"tempest-tests-tempest-tests-runner\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-tempest-all:current-podified\\\"\"" pod="openstack/tempest-tests-tempest" podUID="61480a22-be11-4fc5-83fa-8736dec80842" Dec 05 18:19:27 crc kubenswrapper[4961]: I1205 18:19:27.245548 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 18:19:27 crc kubenswrapper[4961]: I1205 18:19:27.246275 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 18:19:27 crc kubenswrapper[4961]: I1205 18:19:27.246346 4961 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" Dec 05 18:19:27 crc kubenswrapper[4961]: I1205 18:19:27.247467 4961 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"af41fc6fc971c654acf93e33e1118a9716324f025cf05bd923d78a94ca7abd2c"} pod="openshift-machine-config-operator/machine-config-daemon-4vc27" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 18:19:27 crc kubenswrapper[4961]: I1205 18:19:27.247575 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" containerID="cri-o://af41fc6fc971c654acf93e33e1118a9716324f025cf05bd923d78a94ca7abd2c" gracePeriod=600 Dec 05 18:19:27 crc kubenswrapper[4961]: I1205 18:19:27.502623 4961 generic.go:334] "Generic (PLEG): container finished" podID="c048c267-061b-479b-9d63-b3aee093d9f6" containerID="af41fc6fc971c654acf93e33e1118a9716324f025cf05bd923d78a94ca7abd2c" exitCode=0 Dec 05 18:19:27 crc kubenswrapper[4961]: I1205 18:19:27.502715 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerDied","Data":"af41fc6fc971c654acf93e33e1118a9716324f025cf05bd923d78a94ca7abd2c"} Dec 05 18:19:27 crc kubenswrapper[4961]: I1205 18:19:27.503052 4961 scope.go:117] "RemoveContainer" containerID="8dfddd3e720c0e76d031bd5bb9ded5d85603886ff6c94958d15e04d0c199839b" Dec 05 18:19:28 crc kubenswrapper[4961]: I1205 18:19:28.336655 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack"/"tempest-tests-tempest-env-vars-s0" Dec 05 18:19:28 crc kubenswrapper[4961]: I1205 18:19:28.518208 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerStarted","Data":"fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf"} Dec 05 18:19:29 crc 
Dec 05 18:19:29 crc kubenswrapper[4961]: I1205 18:19:29.531676 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"61480a22-be11-4fc5-83fa-8736dec80842","Type":"ContainerStarted","Data":"0550bc109d1cac8af881b6603902abe9bd2deeae7499fdd53482aa2b546845a1"}
Dec 05 18:21:01 crc kubenswrapper[4961]: I1205 18:21:01.868054 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/tempest-tests-tempest" podStartSLOduration=96.411888928 podStartE2EDuration="2m26.868028808s" podCreationTimestamp="2025-12-05 18:18:35 +0000 UTC" firstStartedPulling="2025-12-05 18:18:37.877267177 +0000 UTC m=+2723.938417680" lastFinishedPulling="2025-12-05 18:19:28.333407077 +0000 UTC m=+2774.394557560" observedRunningTime="2025-12-05 18:19:29.557181199 +0000 UTC m=+2775.618331742" watchObservedRunningTime="2025-12-05 18:21:01.868028808 +0000 UTC m=+2867.929179301"
Dec 05 18:21:01 crc kubenswrapper[4961]: I1205 18:21:01.935085 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rppzr"]
Dec 05 18:21:01 crc kubenswrapper[4961]: I1205 18:21:01.937503 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rppzr"
Dec 05 18:21:01 crc kubenswrapper[4961]: I1205 18:21:01.943546 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rppzr"]
Dec 05 18:21:02 crc kubenswrapper[4961]: I1205 18:21:02.136885 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2-catalog-content\") pod \"redhat-marketplace-rppzr\" (UID: \"c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2\") " pod="openshift-marketplace/redhat-marketplace-rppzr"
Dec 05 18:21:02 crc kubenswrapper[4961]: I1205 18:21:02.136941 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nlzjx\" (UniqueName: \"kubernetes.io/projected/c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2-kube-api-access-nlzjx\") pod \"redhat-marketplace-rppzr\" (UID: \"c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2\") " pod="openshift-marketplace/redhat-marketplace-rppzr"
Dec 05 18:21:02 crc kubenswrapper[4961]: I1205 18:21:02.136995 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2-utilities\") pod \"redhat-marketplace-rppzr\" (UID: \"c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2\") " pod="openshift-marketplace/redhat-marketplace-rppzr"
Dec 05 18:21:02 crc kubenswrapper[4961]: I1205 18:21:02.238548 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2-catalog-content\") pod \"redhat-marketplace-rppzr\" (UID: \"c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2\") " pod="openshift-marketplace/redhat-marketplace-rppzr"
Dec 05 18:21:02 crc kubenswrapper[4961]: I1205 18:21:02.238649 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nlzjx\" (UniqueName: \"kubernetes.io/projected/c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2-kube-api-access-nlzjx\") pod \"redhat-marketplace-rppzr\" (UID: \"c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2\") " pod="openshift-marketplace/redhat-marketplace-rppzr"
Dec 05 18:21:02 crc kubenswrapper[4961]: I1205 18:21:02.238718 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2-utilities\") pod \"redhat-marketplace-rppzr\" (UID: \"c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2\") " pod="openshift-marketplace/redhat-marketplace-rppzr"
Dec 05 18:21:02 crc kubenswrapper[4961]: I1205 18:21:02.239167 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2-catalog-content\") pod \"redhat-marketplace-rppzr\" (UID: \"c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2\") " pod="openshift-marketplace/redhat-marketplace-rppzr"
Dec 05 18:21:02 crc kubenswrapper[4961]: I1205 18:21:02.239218 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2-utilities\") pod \"redhat-marketplace-rppzr\" (UID: \"c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2\") " pod="openshift-marketplace/redhat-marketplace-rppzr"
Dec 05 18:21:02 crc kubenswrapper[4961]: I1205 18:21:02.259600 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nlzjx\" (UniqueName: \"kubernetes.io/projected/c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2-kube-api-access-nlzjx\") pod \"redhat-marketplace-rppzr\" (UID: \"c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2\") " pod="openshift-marketplace/redhat-marketplace-rppzr"
Dec 05 18:21:02 crc kubenswrapper[4961]: I1205 18:21:02.264530 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rppzr"
Dec 05 18:21:02 crc kubenswrapper[4961]: I1205 18:21:02.801164 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rppzr"]
Dec 05 18:21:03 crc kubenswrapper[4961]: I1205 18:21:03.512844 4961 generic.go:334] "Generic (PLEG): container finished" podID="c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2" containerID="c2d788a82c8f24a34f510673e44d0f7d890226facdd673cb0a3aa63a75c6a550" exitCode=0
Dec 05 18:21:03 crc kubenswrapper[4961]: I1205 18:21:03.512890 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rppzr" event={"ID":"c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2","Type":"ContainerDied","Data":"c2d788a82c8f24a34f510673e44d0f7d890226facdd673cb0a3aa63a75c6a550"}
Dec 05 18:21:03 crc kubenswrapper[4961]: I1205 18:21:03.513117 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rppzr" event={"ID":"c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2","Type":"ContainerStarted","Data":"9c9ed699c9442ae50803a3c81e2437273198dd36982f65b87e4c13742fa60670"}
Dec 05 18:21:03 crc kubenswrapper[4961]: I1205 18:21:03.514768 4961 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 18:21:04 crc kubenswrapper[4961]: I1205 18:21:04.526308 4961 generic.go:334] "Generic (PLEG): container finished" podID="c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2" containerID="51126c48f6fca0c8194eb18f64c411b1a582a4730b01f1dce24021f694981e0e" exitCode=0
Dec 05 18:21:04 crc kubenswrapper[4961]: I1205 18:21:04.526481 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rppzr" event={"ID":"c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2","Type":"ContainerDied","Data":"51126c48f6fca0c8194eb18f64c411b1a582a4730b01f1dce24021f694981e0e"}
Dec 05 18:21:05 crc kubenswrapper[4961]: I1205 18:21:05.538123 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rppzr" event={"ID":"c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2","Type":"ContainerStarted","Data":"49f76f2131f5f18a0b4e23bb4a4cd5bf6377e8651a4274dc7ecdedb3444c7fb5"}
Dec 05 18:21:05 crc kubenswrapper[4961]: I1205 18:21:05.562162 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rppzr" podStartSLOduration=3.032799127 podStartE2EDuration="4.562138329s" podCreationTimestamp="2025-12-05 18:21:01 +0000 UTC" firstStartedPulling="2025-12-05 18:21:03.514500218 +0000 UTC m=+2869.575650691" lastFinishedPulling="2025-12-05 18:21:05.04383942 +0000 UTC m=+2871.104989893" observedRunningTime="2025-12-05 18:21:05.554028101 +0000 UTC m=+2871.615178574" watchObservedRunningTime="2025-12-05 18:21:05.562138329 +0000 UTC m=+2871.623288802"
Dec 05 18:21:12 crc kubenswrapper[4961]: I1205 18:21:12.265323 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rppzr"
Dec 05 18:21:12 crc kubenswrapper[4961]: I1205 18:21:12.265848 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rppzr"
Dec 05 18:21:12 crc kubenswrapper[4961]: I1205 18:21:12.311307 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rppzr"
Dec 05 18:21:12 crc kubenswrapper[4961]: I1205 18:21:12.676024 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rppzr"
Dec 05 18:21:12 crc kubenswrapper[4961]: I1205 18:21:12.723542 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rppzr"]
Dec 05 18:21:14 crc kubenswrapper[4961]: I1205 18:21:14.643241 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rppzr" podUID="c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2" containerName="registry-server" containerID="cri-o://49f76f2131f5f18a0b4e23bb4a4cd5bf6377e8651a4274dc7ecdedb3444c7fb5" gracePeriod=2
Dec 05 18:21:15 crc kubenswrapper[4961]: I1205 18:21:15.171043 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rppzr"
Dec 05 18:21:15 crc kubenswrapper[4961]: I1205 18:21:15.287073 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2-catalog-content\") pod \"c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2\" (UID: \"c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2\") "
Dec 05 18:21:15 crc kubenswrapper[4961]: I1205 18:21:15.287191 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nlzjx\" (UniqueName: \"kubernetes.io/projected/c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2-kube-api-access-nlzjx\") pod \"c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2\" (UID: \"c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2\") "
Dec 05 18:21:15 crc kubenswrapper[4961]: I1205 18:21:15.287285 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2-utilities\") pod \"c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2\" (UID: \"c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2\") "
Dec 05 18:21:15 crc kubenswrapper[4961]: I1205 18:21:15.288673 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2-utilities" (OuterVolumeSpecName: "utilities") pod "c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2" (UID: "c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 18:21:15 crc kubenswrapper[4961]: I1205 18:21:15.294300 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2-kube-api-access-nlzjx" (OuterVolumeSpecName: "kube-api-access-nlzjx") pod "c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2" (UID: "c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2"). InnerVolumeSpecName "kube-api-access-nlzjx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 18:21:15 crc kubenswrapper[4961]: I1205 18:21:15.317197 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2" (UID: "c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:21:15 crc kubenswrapper[4961]: I1205 18:21:15.391097 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 18:21:15 crc kubenswrapper[4961]: I1205 18:21:15.391164 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nlzjx\" (UniqueName: \"kubernetes.io/projected/c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2-kube-api-access-nlzjx\") on node \"crc\" DevicePath \"\"" Dec 05 18:21:15 crc kubenswrapper[4961]: I1205 18:21:15.391186 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 18:21:15 crc kubenswrapper[4961]: I1205 18:21:15.657541 4961 generic.go:334] "Generic (PLEG): container finished" podID="c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2" containerID="49f76f2131f5f18a0b4e23bb4a4cd5bf6377e8651a4274dc7ecdedb3444c7fb5" exitCode=0 Dec 05 18:21:15 crc kubenswrapper[4961]: I1205 18:21:15.657604 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rppzr" Dec 05 18:21:15 crc kubenswrapper[4961]: I1205 18:21:15.657662 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rppzr" event={"ID":"c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2","Type":"ContainerDied","Data":"49f76f2131f5f18a0b4e23bb4a4cd5bf6377e8651a4274dc7ecdedb3444c7fb5"} Dec 05 18:21:15 crc kubenswrapper[4961]: I1205 18:21:15.658034 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rppzr" event={"ID":"c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2","Type":"ContainerDied","Data":"9c9ed699c9442ae50803a3c81e2437273198dd36982f65b87e4c13742fa60670"} Dec 05 18:21:15 crc kubenswrapper[4961]: I1205 18:21:15.658060 4961 scope.go:117] "RemoveContainer" containerID="49f76f2131f5f18a0b4e23bb4a4cd5bf6377e8651a4274dc7ecdedb3444c7fb5" Dec 05 18:21:15 crc kubenswrapper[4961]: I1205 18:21:15.676473 4961 scope.go:117] "RemoveContainer" containerID="51126c48f6fca0c8194eb18f64c411b1a582a4730b01f1dce24021f694981e0e" Dec 05 18:21:15 crc kubenswrapper[4961]: I1205 18:21:15.698903 4961 scope.go:117] "RemoveContainer" containerID="c2d788a82c8f24a34f510673e44d0f7d890226facdd673cb0a3aa63a75c6a550" Dec 05 18:21:15 crc kubenswrapper[4961]: I1205 18:21:15.715275 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rppzr"] Dec 05 18:21:15 crc kubenswrapper[4961]: I1205 18:21:15.724944 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-rppzr"] Dec 05 18:21:15 crc kubenswrapper[4961]: I1205 18:21:15.744196 4961 scope.go:117] "RemoveContainer" containerID="49f76f2131f5f18a0b4e23bb4a4cd5bf6377e8651a4274dc7ecdedb3444c7fb5" Dec 05 18:21:15 crc kubenswrapper[4961]: E1205 18:21:15.744573 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"49f76f2131f5f18a0b4e23bb4a4cd5bf6377e8651a4274dc7ecdedb3444c7fb5\": container with ID starting with 49f76f2131f5f18a0b4e23bb4a4cd5bf6377e8651a4274dc7ecdedb3444c7fb5 not found: ID does not exist" containerID="49f76f2131f5f18a0b4e23bb4a4cd5bf6377e8651a4274dc7ecdedb3444c7fb5" Dec 05 18:21:15 crc kubenswrapper[4961]: I1205 18:21:15.744618 4961 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"49f76f2131f5f18a0b4e23bb4a4cd5bf6377e8651a4274dc7ecdedb3444c7fb5"} err="failed to get container status \"49f76f2131f5f18a0b4e23bb4a4cd5bf6377e8651a4274dc7ecdedb3444c7fb5\": rpc error: code = NotFound desc = could not find container \"49f76f2131f5f18a0b4e23bb4a4cd5bf6377e8651a4274dc7ecdedb3444c7fb5\": container with ID starting with 49f76f2131f5f18a0b4e23bb4a4cd5bf6377e8651a4274dc7ecdedb3444c7fb5 not found: ID does not exist" Dec 05 18:21:15 crc kubenswrapper[4961]: I1205 18:21:15.744649 4961 scope.go:117] "RemoveContainer" containerID="51126c48f6fca0c8194eb18f64c411b1a582a4730b01f1dce24021f694981e0e" Dec 05 18:21:15 crc kubenswrapper[4961]: E1205 18:21:15.746334 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51126c48f6fca0c8194eb18f64c411b1a582a4730b01f1dce24021f694981e0e\": container with ID starting with 51126c48f6fca0c8194eb18f64c411b1a582a4730b01f1dce24021f694981e0e not found: ID does not exist" containerID="51126c48f6fca0c8194eb18f64c411b1a582a4730b01f1dce24021f694981e0e" Dec 05 18:21:15 crc kubenswrapper[4961]: I1205 18:21:15.746371 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51126c48f6fca0c8194eb18f64c411b1a582a4730b01f1dce24021f694981e0e"} err="failed to get container status \"51126c48f6fca0c8194eb18f64c411b1a582a4730b01f1dce24021f694981e0e\": rpc error: code = NotFound desc = could not find container \"51126c48f6fca0c8194eb18f64c411b1a582a4730b01f1dce24021f694981e0e\": container with ID starting with 51126c48f6fca0c8194eb18f64c411b1a582a4730b01f1dce24021f694981e0e not found: ID does not exist" Dec 05 18:21:15 crc kubenswrapper[4961]: I1205 18:21:15.746403 4961 scope.go:117] "RemoveContainer" containerID="c2d788a82c8f24a34f510673e44d0f7d890226facdd673cb0a3aa63a75c6a550" Dec 05 18:21:15 crc kubenswrapper[4961]: E1205 18:21:15.746764 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c2d788a82c8f24a34f510673e44d0f7d890226facdd673cb0a3aa63a75c6a550\": container with ID starting with c2d788a82c8f24a34f510673e44d0f7d890226facdd673cb0a3aa63a75c6a550 not found: ID does not exist" containerID="c2d788a82c8f24a34f510673e44d0f7d890226facdd673cb0a3aa63a75c6a550" Dec 05 18:21:15 crc kubenswrapper[4961]: I1205 18:21:15.746800 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c2d788a82c8f24a34f510673e44d0f7d890226facdd673cb0a3aa63a75c6a550"} err="failed to get container status \"c2d788a82c8f24a34f510673e44d0f7d890226facdd673cb0a3aa63a75c6a550\": rpc error: code = NotFound desc = could not find container \"c2d788a82c8f24a34f510673e44d0f7d890226facdd673cb0a3aa63a75c6a550\": container with ID starting with c2d788a82c8f24a34f510673e44d0f7d890226facdd673cb0a3aa63a75c6a550 not found: ID does not exist" Dec 05 18:21:16 crc kubenswrapper[4961]: I1205 18:21:16.893322 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2" path="/var/lib/kubelet/pods/c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2/volumes" Dec 05 18:21:27 crc kubenswrapper[4961]: I1205 18:21:27.245676 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
Dec 05 18:21:27 crc kubenswrapper[4961]: I1205 18:21:27.246253 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 18:21:57 crc kubenswrapper[4961]: I1205 18:21:57.245225 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 18:21:57 crc kubenswrapper[4961]: I1205 18:21:57.245896 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 18:22:27 crc kubenswrapper[4961]: I1205 18:22:27.246184 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 18:22:27 crc kubenswrapper[4961]: I1205 18:22:27.247040 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 18:22:27 crc kubenswrapper[4961]: I1205 18:22:27.247131 4961 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vc27"
Dec 05 18:22:27 crc kubenswrapper[4961]: I1205 18:22:27.248462 4961 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf"} pod="openshift-machine-config-operator/machine-config-daemon-4vc27" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 18:22:27 crc kubenswrapper[4961]: I1205 18:22:27.248597 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" containerID="cri-o://fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf" gracePeriod=600
Dec 05 18:22:27 crc kubenswrapper[4961]: E1205 18:22:27.385528 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:22:27 crc kubenswrapper[4961]: I1205 18:22:27.390925 4961 generic.go:334] "Generic (PLEG): container finished" podID="c048c267-061b-479b-9d63-b3aee093d9f6" containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf" exitCode=0
Dec 05 18:22:27 crc kubenswrapper[4961]: I1205 18:22:27.390975 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerDied","Data":"fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf"}
Dec 05 18:22:27 crc kubenswrapper[4961]: I1205 18:22:27.391016 4961 scope.go:117] "RemoveContainer" containerID="af41fc6fc971c654acf93e33e1118a9716324f025cf05bd923d78a94ca7abd2c"
Dec 05 18:22:27 crc kubenswrapper[4961]: I1205 18:22:27.391945 4961 scope.go:117] "RemoveContainer" containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf"
Dec 05 18:22:27 crc kubenswrapper[4961]: E1205 18:22:27.392287 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:22:38 crc kubenswrapper[4961]: I1205 18:22:38.863556 4961 scope.go:117] "RemoveContainer" containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf"
Dec 05 18:22:38 crc kubenswrapper[4961]: E1205 18:22:38.864424 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:22:49 crc kubenswrapper[4961]: I1205 18:22:49.864882 4961 scope.go:117] "RemoveContainer" containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf"
Dec 05 18:22:49 crc kubenswrapper[4961]: E1205 18:22:49.866657 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:23:00 crc kubenswrapper[4961]: I1205 18:23:00.863981 4961 scope.go:117] "RemoveContainer" containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf"
Dec 05 18:23:00 crc kubenswrapper[4961]: E1205 18:23:00.864586 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:23:13 crc kubenswrapper[4961]: I1205 18:23:13.864442 4961 scope.go:117] "RemoveContainer" containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf"
containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf" Dec 05 18:23:13 crc kubenswrapper[4961]: E1205 18:23:13.865507 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:23:28 crc kubenswrapper[4961]: I1205 18:23:28.864410 4961 scope.go:117] "RemoveContainer" containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf" Dec 05 18:23:28 crc kubenswrapper[4961]: E1205 18:23:28.865538 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:23:41 crc kubenswrapper[4961]: I1205 18:23:41.864552 4961 scope.go:117] "RemoveContainer" containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf" Dec 05 18:23:41 crc kubenswrapper[4961]: E1205 18:23:41.865377 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:23:56 crc kubenswrapper[4961]: I1205 18:23:56.863853 4961 scope.go:117] "RemoveContainer" containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf" Dec 05 18:23:56 crc kubenswrapper[4961]: E1205 18:23:56.864738 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:24:09 crc kubenswrapper[4961]: I1205 18:24:09.863898 4961 scope.go:117] "RemoveContainer" containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf" Dec 05 18:24:09 crc kubenswrapper[4961]: E1205 18:24:09.864949 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:24:22 crc kubenswrapper[4961]: I1205 18:24:22.864311 4961 scope.go:117] "RemoveContainer" containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf" Dec 05 18:24:22 crc kubenswrapper[4961]: E1205 18:24:22.865204 4961 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:24:37 crc kubenswrapper[4961]: I1205 18:24:37.863740 4961 scope.go:117] "RemoveContainer" containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf" Dec 05 18:24:37 crc kubenswrapper[4961]: E1205 18:24:37.864521 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:24:51 crc kubenswrapper[4961]: I1205 18:24:51.864578 4961 scope.go:117] "RemoveContainer" containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf" Dec 05 18:24:51 crc kubenswrapper[4961]: E1205 18:24:51.865807 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:25:05 crc kubenswrapper[4961]: I1205 18:25:05.863694 4961 scope.go:117] "RemoveContainer" containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf" Dec 05 18:25:05 crc kubenswrapper[4961]: E1205 18:25:05.865517 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:25:18 crc kubenswrapper[4961]: I1205 18:25:18.864307 4961 scope.go:117] "RemoveContainer" containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf" Dec 05 18:25:18 crc kubenswrapper[4961]: E1205 18:25:18.865379 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:25:31 crc kubenswrapper[4961]: I1205 18:25:31.864696 4961 scope.go:117] "RemoveContainer" containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf" Dec 05 18:25:31 crc kubenswrapper[4961]: E1205 18:25:31.865703 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
Dec 05 18:25:44 crc kubenswrapper[4961]: I1205 18:25:44.870651 4961 scope.go:117] "RemoveContainer" containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf"
Dec 05 18:25:44 crc kubenswrapper[4961]: E1205 18:25:44.871643 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.424863 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-59qsl"]
Dec 05 18:25:55 crc kubenswrapper[4961]: E1205 18:25:55.425760 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2" containerName="registry-server"
Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.425791 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2" containerName="registry-server"
Dec 05 18:25:55 crc kubenswrapper[4961]: E1205 18:25:55.425820 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2" containerName="extract-utilities"
Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.425829 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2" containerName="extract-utilities"
Dec 05 18:25:55 crc kubenswrapper[4961]: E1205 18:25:55.425853 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2" containerName="extract-content"
Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.425862 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2" containerName="extract-content"
Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.426079 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="c1357e5e-8ae4-4f9a-a7d8-8da30a2105d2" containerName="registry-server"
Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.427825 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-59qsl"
Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.431429 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-59qsl"]
Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.519758 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cea6a107-2ced-4158-9446-ab66c0b5452f-utilities\") pod \"community-operators-59qsl\" (UID: \"cea6a107-2ced-4158-9446-ab66c0b5452f\") " pod="openshift-marketplace/community-operators-59qsl"
Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.519833 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rvfwl\" (UniqueName: \"kubernetes.io/projected/cea6a107-2ced-4158-9446-ab66c0b5452f-kube-api-access-rvfwl\") pod \"community-operators-59qsl\" (UID: \"cea6a107-2ced-4158-9446-ab66c0b5452f\") " pod="openshift-marketplace/community-operators-59qsl"
Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.520168 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cea6a107-2ced-4158-9446-ab66c0b5452f-catalog-content\") pod \"community-operators-59qsl\" (UID: \"cea6a107-2ced-4158-9446-ab66c0b5452f\") " pod="openshift-marketplace/community-operators-59qsl"
Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.606213 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-42n6d"]
Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.607895 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-42n6d"
Need to start a new one" pod="openshift-marketplace/certified-operators-42n6d" Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.622753 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cea6a107-2ced-4158-9446-ab66c0b5452f-utilities\") pod \"community-operators-59qsl\" (UID: \"cea6a107-2ced-4158-9446-ab66c0b5452f\") " pod="openshift-marketplace/community-operators-59qsl" Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.622835 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rvfwl\" (UniqueName: \"kubernetes.io/projected/cea6a107-2ced-4158-9446-ab66c0b5452f-kube-api-access-rvfwl\") pod \"community-operators-59qsl\" (UID: \"cea6a107-2ced-4158-9446-ab66c0b5452f\") " pod="openshift-marketplace/community-operators-59qsl" Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.622938 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cea6a107-2ced-4158-9446-ab66c0b5452f-catalog-content\") pod \"community-operators-59qsl\" (UID: \"cea6a107-2ced-4158-9446-ab66c0b5452f\") " pod="openshift-marketplace/community-operators-59qsl" Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.623602 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cea6a107-2ced-4158-9446-ab66c0b5452f-catalog-content\") pod \"community-operators-59qsl\" (UID: \"cea6a107-2ced-4158-9446-ab66c0b5452f\") " pod="openshift-marketplace/community-operators-59qsl" Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.623932 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cea6a107-2ced-4158-9446-ab66c0b5452f-utilities\") pod \"community-operators-59qsl\" (UID: \"cea6a107-2ced-4158-9446-ab66c0b5452f\") " pod="openshift-marketplace/community-operators-59qsl" Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.631400 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-42n6d"] Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.665734 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rvfwl\" (UniqueName: \"kubernetes.io/projected/cea6a107-2ced-4158-9446-ab66c0b5452f-kube-api-access-rvfwl\") pod \"community-operators-59qsl\" (UID: \"cea6a107-2ced-4158-9446-ab66c0b5452f\") " pod="openshift-marketplace/community-operators-59qsl" Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.724519 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a993fcd3-a607-4987-80ff-887da63738db-utilities\") pod \"certified-operators-42n6d\" (UID: \"a993fcd3-a607-4987-80ff-887da63738db\") " pod="openshift-marketplace/certified-operators-42n6d" Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.724963 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a993fcd3-a607-4987-80ff-887da63738db-catalog-content\") pod \"certified-operators-42n6d\" (UID: \"a993fcd3-a607-4987-80ff-887da63738db\") " pod="openshift-marketplace/certified-operators-42n6d" Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.725052 4961 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dgsns\" (UniqueName: \"kubernetes.io/projected/a993fcd3-a607-4987-80ff-887da63738db-kube-api-access-dgsns\") pod \"certified-operators-42n6d\" (UID: \"a993fcd3-a607-4987-80ff-887da63738db\") " pod="openshift-marketplace/certified-operators-42n6d" Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.755384 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-59qsl" Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.827259 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a993fcd3-a607-4987-80ff-887da63738db-utilities\") pod \"certified-operators-42n6d\" (UID: \"a993fcd3-a607-4987-80ff-887da63738db\") " pod="openshift-marketplace/certified-operators-42n6d" Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.827722 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a993fcd3-a607-4987-80ff-887da63738db-catalog-content\") pod \"certified-operators-42n6d\" (UID: \"a993fcd3-a607-4987-80ff-887da63738db\") " pod="openshift-marketplace/certified-operators-42n6d" Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.827758 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dgsns\" (UniqueName: \"kubernetes.io/projected/a993fcd3-a607-4987-80ff-887da63738db-kube-api-access-dgsns\") pod \"certified-operators-42n6d\" (UID: \"a993fcd3-a607-4987-80ff-887da63738db\") " pod="openshift-marketplace/certified-operators-42n6d" Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.828740 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a993fcd3-a607-4987-80ff-887da63738db-utilities\") pod \"certified-operators-42n6d\" (UID: \"a993fcd3-a607-4987-80ff-887da63738db\") " pod="openshift-marketplace/certified-operators-42n6d" Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.829078 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a993fcd3-a607-4987-80ff-887da63738db-catalog-content\") pod \"certified-operators-42n6d\" (UID: \"a993fcd3-a607-4987-80ff-887da63738db\") " pod="openshift-marketplace/certified-operators-42n6d" Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.866439 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dgsns\" (UniqueName: \"kubernetes.io/projected/a993fcd3-a607-4987-80ff-887da63738db-kube-api-access-dgsns\") pod \"certified-operators-42n6d\" (UID: \"a993fcd3-a607-4987-80ff-887da63738db\") " pod="openshift-marketplace/certified-operators-42n6d" Dec 05 18:25:55 crc kubenswrapper[4961]: I1205 18:25:55.938635 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-42n6d" Dec 05 18:25:56 crc kubenswrapper[4961]: I1205 18:25:56.329836 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-59qsl"] Dec 05 18:25:56 crc kubenswrapper[4961]: I1205 18:25:56.498709 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-42n6d"] Dec 05 18:25:56 crc kubenswrapper[4961]: W1205 18:25:56.504935 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda993fcd3_a607_4987_80ff_887da63738db.slice/crio-4369ea8c65169cd89a2a4d3cd7b7505d08f50cd9b4755d5839ec2e7f71ef3072 WatchSource:0}: Error finding container 4369ea8c65169cd89a2a4d3cd7b7505d08f50cd9b4755d5839ec2e7f71ef3072: Status 404 returned error can't find the container with id 4369ea8c65169cd89a2a4d3cd7b7505d08f50cd9b4755d5839ec2e7f71ef3072 Dec 05 18:25:56 crc kubenswrapper[4961]: I1205 18:25:56.534282 4961 generic.go:334] "Generic (PLEG): container finished" podID="cea6a107-2ced-4158-9446-ab66c0b5452f" containerID="b98924c9cfb6a451d0e2b3141f884fc1ef96c49e822b0b4cfda6410a29dda9e8" exitCode=0 Dec 05 18:25:56 crc kubenswrapper[4961]: I1205 18:25:56.534338 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-59qsl" event={"ID":"cea6a107-2ced-4158-9446-ab66c0b5452f","Type":"ContainerDied","Data":"b98924c9cfb6a451d0e2b3141f884fc1ef96c49e822b0b4cfda6410a29dda9e8"} Dec 05 18:25:56 crc kubenswrapper[4961]: I1205 18:25:56.534362 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-59qsl" event={"ID":"cea6a107-2ced-4158-9446-ab66c0b5452f","Type":"ContainerStarted","Data":"8c89f58eeddc38981e127e2bb997ba12eab30c3bb891d6168d7e792f206ad3fb"} Dec 05 18:25:56 crc kubenswrapper[4961]: I1205 18:25:56.537853 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-42n6d" event={"ID":"a993fcd3-a607-4987-80ff-887da63738db","Type":"ContainerStarted","Data":"4369ea8c65169cd89a2a4d3cd7b7505d08f50cd9b4755d5839ec2e7f71ef3072"} Dec 05 18:25:56 crc kubenswrapper[4961]: I1205 18:25:56.864028 4961 scope.go:117] "RemoveContainer" containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf" Dec 05 18:25:56 crc kubenswrapper[4961]: E1205 18:25:56.864466 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:25:57 crc kubenswrapper[4961]: I1205 18:25:57.547757 4961 generic.go:334] "Generic (PLEG): container finished" podID="a993fcd3-a607-4987-80ff-887da63738db" containerID="684c853a329eb915a94b15157ff3a09d7b94a58b7eb5895754db0f9a231815de" exitCode=0 Dec 05 18:25:57 crc kubenswrapper[4961]: I1205 18:25:57.547877 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-42n6d" event={"ID":"a993fcd3-a607-4987-80ff-887da63738db","Type":"ContainerDied","Data":"684c853a329eb915a94b15157ff3a09d7b94a58b7eb5895754db0f9a231815de"} Dec 05 18:25:58 crc kubenswrapper[4961]: I1205 18:25:58.019648 4961 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/redhat-operators-gf74b"] Dec 05 18:25:58 crc kubenswrapper[4961]: I1205 18:25:58.022340 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gf74b" Dec 05 18:25:58 crc kubenswrapper[4961]: I1205 18:25:58.030909 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gf74b"] Dec 05 18:25:58 crc kubenswrapper[4961]: I1205 18:25:58.110505 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64428795-e493-4f34-9e8c-78b07e430963-catalog-content\") pod \"redhat-operators-gf74b\" (UID: \"64428795-e493-4f34-9e8c-78b07e430963\") " pod="openshift-marketplace/redhat-operators-gf74b" Dec 05 18:25:58 crc kubenswrapper[4961]: I1205 18:25:58.110870 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xqnhl\" (UniqueName: \"kubernetes.io/projected/64428795-e493-4f34-9e8c-78b07e430963-kube-api-access-xqnhl\") pod \"redhat-operators-gf74b\" (UID: \"64428795-e493-4f34-9e8c-78b07e430963\") " pod="openshift-marketplace/redhat-operators-gf74b" Dec 05 18:25:58 crc kubenswrapper[4961]: I1205 18:25:58.110904 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64428795-e493-4f34-9e8c-78b07e430963-utilities\") pod \"redhat-operators-gf74b\" (UID: \"64428795-e493-4f34-9e8c-78b07e430963\") " pod="openshift-marketplace/redhat-operators-gf74b" Dec 05 18:25:58 crc kubenswrapper[4961]: I1205 18:25:58.212238 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64428795-e493-4f34-9e8c-78b07e430963-catalog-content\") pod \"redhat-operators-gf74b\" (UID: \"64428795-e493-4f34-9e8c-78b07e430963\") " pod="openshift-marketplace/redhat-operators-gf74b" Dec 05 18:25:58 crc kubenswrapper[4961]: I1205 18:25:58.212358 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xqnhl\" (UniqueName: \"kubernetes.io/projected/64428795-e493-4f34-9e8c-78b07e430963-kube-api-access-xqnhl\") pod \"redhat-operators-gf74b\" (UID: \"64428795-e493-4f34-9e8c-78b07e430963\") " pod="openshift-marketplace/redhat-operators-gf74b" Dec 05 18:25:58 crc kubenswrapper[4961]: I1205 18:25:58.212432 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64428795-e493-4f34-9e8c-78b07e430963-utilities\") pod \"redhat-operators-gf74b\" (UID: \"64428795-e493-4f34-9e8c-78b07e430963\") " pod="openshift-marketplace/redhat-operators-gf74b" Dec 05 18:25:58 crc kubenswrapper[4961]: I1205 18:25:58.212830 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64428795-e493-4f34-9e8c-78b07e430963-utilities\") pod \"redhat-operators-gf74b\" (UID: \"64428795-e493-4f34-9e8c-78b07e430963\") " pod="openshift-marketplace/redhat-operators-gf74b" Dec 05 18:25:58 crc kubenswrapper[4961]: I1205 18:25:58.212883 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64428795-e493-4f34-9e8c-78b07e430963-catalog-content\") pod \"redhat-operators-gf74b\" (UID: \"64428795-e493-4f34-9e8c-78b07e430963\") " 
pod="openshift-marketplace/redhat-operators-gf74b" Dec 05 18:25:58 crc kubenswrapper[4961]: I1205 18:25:58.233502 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xqnhl\" (UniqueName: \"kubernetes.io/projected/64428795-e493-4f34-9e8c-78b07e430963-kube-api-access-xqnhl\") pod \"redhat-operators-gf74b\" (UID: \"64428795-e493-4f34-9e8c-78b07e430963\") " pod="openshift-marketplace/redhat-operators-gf74b" Dec 05 18:25:58 crc kubenswrapper[4961]: I1205 18:25:58.347803 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gf74b" Dec 05 18:25:58 crc kubenswrapper[4961]: I1205 18:25:58.564393 4961 generic.go:334] "Generic (PLEG): container finished" podID="cea6a107-2ced-4158-9446-ab66c0b5452f" containerID="bd2abdcd5ebdba445c1dc61b037a1ebae7b895ce421a521b3bf5fead3449e48d" exitCode=0 Dec 05 18:25:58 crc kubenswrapper[4961]: I1205 18:25:58.564848 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-59qsl" event={"ID":"cea6a107-2ced-4158-9446-ab66c0b5452f","Type":"ContainerDied","Data":"bd2abdcd5ebdba445c1dc61b037a1ebae7b895ce421a521b3bf5fead3449e48d"} Dec 05 18:25:58 crc kubenswrapper[4961]: I1205 18:25:58.923107 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gf74b"] Dec 05 18:25:59 crc kubenswrapper[4961]: I1205 18:25:59.577675 4961 generic.go:334] "Generic (PLEG): container finished" podID="a993fcd3-a607-4987-80ff-887da63738db" containerID="9eec8b16f8a23e8965c00c81725929d0a908ece9446fa824dfd12bc42f95f104" exitCode=0 Dec 05 18:25:59 crc kubenswrapper[4961]: I1205 18:25:59.577754 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-42n6d" event={"ID":"a993fcd3-a607-4987-80ff-887da63738db","Type":"ContainerDied","Data":"9eec8b16f8a23e8965c00c81725929d0a908ece9446fa824dfd12bc42f95f104"} Dec 05 18:25:59 crc kubenswrapper[4961]: I1205 18:25:59.579649 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gf74b" event={"ID":"64428795-e493-4f34-9e8c-78b07e430963","Type":"ContainerStarted","Data":"465604dbc31be79a3933bac2cd786d74373fb997511187f7992496e1683f48c9"} Dec 05 18:26:00 crc kubenswrapper[4961]: I1205 18:26:00.591749 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-42n6d" event={"ID":"a993fcd3-a607-4987-80ff-887da63738db","Type":"ContainerStarted","Data":"9a391016938890b656b7dd063e5cb7f83d3093e7bf057a74498b50e6d7312897"} Dec 05 18:26:00 crc kubenswrapper[4961]: I1205 18:26:00.594815 4961 generic.go:334] "Generic (PLEG): container finished" podID="64428795-e493-4f34-9e8c-78b07e430963" containerID="3b8502e309be13d170c06ff001482f585966f73ec89d889ac556901289232d47" exitCode=0 Dec 05 18:26:00 crc kubenswrapper[4961]: I1205 18:26:00.594950 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gf74b" event={"ID":"64428795-e493-4f34-9e8c-78b07e430963","Type":"ContainerDied","Data":"3b8502e309be13d170c06ff001482f585966f73ec89d889ac556901289232d47"} Dec 05 18:26:00 crc kubenswrapper[4961]: I1205 18:26:00.598084 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-59qsl" event={"ID":"cea6a107-2ced-4158-9446-ab66c0b5452f","Type":"ContainerStarted","Data":"ea444d63e42f4c8086ca528430aee71503cc0763059150c92bd79fa24c455097"} Dec 05 18:26:00 crc kubenswrapper[4961]: I1205 
18:26:00.620958 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-42n6d" podStartSLOduration=3.195489723 podStartE2EDuration="5.6209358s" podCreationTimestamp="2025-12-05 18:25:55 +0000 UTC" firstStartedPulling="2025-12-05 18:25:57.549156474 +0000 UTC m=+3163.610306947" lastFinishedPulling="2025-12-05 18:25:59.974602541 +0000 UTC m=+3166.035753024" observedRunningTime="2025-12-05 18:26:00.617552077 +0000 UTC m=+3166.678702550" watchObservedRunningTime="2025-12-05 18:26:00.6209358 +0000 UTC m=+3166.682086273" Dec 05 18:26:00 crc kubenswrapper[4961]: I1205 18:26:00.653824 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-59qsl" podStartSLOduration=2.309217523 podStartE2EDuration="5.653751304s" podCreationTimestamp="2025-12-05 18:25:55 +0000 UTC" firstStartedPulling="2025-12-05 18:25:56.535787701 +0000 UTC m=+3162.596938174" lastFinishedPulling="2025-12-05 18:25:59.880321482 +0000 UTC m=+3165.941471955" observedRunningTime="2025-12-05 18:26:00.645465941 +0000 UTC m=+3166.706616414" watchObservedRunningTime="2025-12-05 18:26:00.653751304 +0000 UTC m=+3166.714901777" Dec 05 18:26:01 crc kubenswrapper[4961]: I1205 18:26:01.608890 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gf74b" event={"ID":"64428795-e493-4f34-9e8c-78b07e430963","Type":"ContainerStarted","Data":"84cb4798047f4a02f291358e68e460e2022a7f2ef7edfaac072dae9b05d82cd3"} Dec 05 18:26:03 crc kubenswrapper[4961]: I1205 18:26:03.628591 4961 generic.go:334] "Generic (PLEG): container finished" podID="64428795-e493-4f34-9e8c-78b07e430963" containerID="84cb4798047f4a02f291358e68e460e2022a7f2ef7edfaac072dae9b05d82cd3" exitCode=0 Dec 05 18:26:03 crc kubenswrapper[4961]: I1205 18:26:03.628691 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gf74b" event={"ID":"64428795-e493-4f34-9e8c-78b07e430963","Type":"ContainerDied","Data":"84cb4798047f4a02f291358e68e460e2022a7f2ef7edfaac072dae9b05d82cd3"} Dec 05 18:26:03 crc kubenswrapper[4961]: I1205 18:26:03.630886 4961 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 18:26:04 crc kubenswrapper[4961]: I1205 18:26:04.643208 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gf74b" event={"ID":"64428795-e493-4f34-9e8c-78b07e430963","Type":"ContainerStarted","Data":"d6fcfbace49efc5eada4219174d3baa81324ab5e3fe4c658994277b7a30ea11e"} Dec 05 18:26:05 crc kubenswrapper[4961]: I1205 18:26:05.756537 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-59qsl" Dec 05 18:26:05 crc kubenswrapper[4961]: I1205 18:26:05.757697 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-59qsl" Dec 05 18:26:05 crc kubenswrapper[4961]: I1205 18:26:05.940260 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-42n6d" Dec 05 18:26:05 crc kubenswrapper[4961]: I1205 18:26:05.940571 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-42n6d" Dec 05 18:26:06 crc kubenswrapper[4961]: I1205 18:26:06.016316 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-42n6d" Dec 05 
18:26:06 crc kubenswrapper[4961]: I1205 18:26:06.036611 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gf74b" podStartSLOduration=5.473280078 podStartE2EDuration="9.036585665s" podCreationTimestamp="2025-12-05 18:25:57 +0000 UTC" firstStartedPulling="2025-12-05 18:26:00.598011269 +0000 UTC m=+3166.659161742" lastFinishedPulling="2025-12-05 18:26:04.161316856 +0000 UTC m=+3170.222467329" observedRunningTime="2025-12-05 18:26:04.66361286 +0000 UTC m=+3170.724763373" watchObservedRunningTime="2025-12-05 18:26:06.036585665 +0000 UTC m=+3172.097736138" Dec 05 18:26:06 crc kubenswrapper[4961]: I1205 18:26:06.707128 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-42n6d" Dec 05 18:26:06 crc kubenswrapper[4961]: I1205 18:26:06.807237 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/community-operators-59qsl" podUID="cea6a107-2ced-4158-9446-ab66c0b5452f" containerName="registry-server" probeResult="failure" output=< Dec 05 18:26:06 crc kubenswrapper[4961]: timeout: failed to connect service ":50051" within 1s Dec 05 18:26:06 crc kubenswrapper[4961]: > Dec 05 18:26:08 crc kubenswrapper[4961]: I1205 18:26:08.348472 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gf74b" Dec 05 18:26:08 crc kubenswrapper[4961]: I1205 18:26:08.348754 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gf74b" Dec 05 18:26:08 crc kubenswrapper[4961]: I1205 18:26:08.402498 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-42n6d"] Dec 05 18:26:09 crc kubenswrapper[4961]: I1205 18:26:09.394207 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-gf74b" podUID="64428795-e493-4f34-9e8c-78b07e430963" containerName="registry-server" probeResult="failure" output=< Dec 05 18:26:09 crc kubenswrapper[4961]: timeout: failed to connect service ":50051" within 1s Dec 05 18:26:09 crc kubenswrapper[4961]: > Dec 05 18:26:09 crc kubenswrapper[4961]: I1205 18:26:09.679800 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-42n6d" podUID="a993fcd3-a607-4987-80ff-887da63738db" containerName="registry-server" containerID="cri-o://9a391016938890b656b7dd063e5cb7f83d3093e7bf057a74498b50e6d7312897" gracePeriod=2 Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.159408 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-42n6d" Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.258718 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dgsns\" (UniqueName: \"kubernetes.io/projected/a993fcd3-a607-4987-80ff-887da63738db-kube-api-access-dgsns\") pod \"a993fcd3-a607-4987-80ff-887da63738db\" (UID: \"a993fcd3-a607-4987-80ff-887da63738db\") " Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.259164 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a993fcd3-a607-4987-80ff-887da63738db-catalog-content\") pod \"a993fcd3-a607-4987-80ff-887da63738db\" (UID: \"a993fcd3-a607-4987-80ff-887da63738db\") " Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.262932 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a993fcd3-a607-4987-80ff-887da63738db-utilities\") pod \"a993fcd3-a607-4987-80ff-887da63738db\" (UID: \"a993fcd3-a607-4987-80ff-887da63738db\") " Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.263650 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a993fcd3-a607-4987-80ff-887da63738db-utilities" (OuterVolumeSpecName: "utilities") pod "a993fcd3-a607-4987-80ff-887da63738db" (UID: "a993fcd3-a607-4987-80ff-887da63738db"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.265263 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a993fcd3-a607-4987-80ff-887da63738db-kube-api-access-dgsns" (OuterVolumeSpecName: "kube-api-access-dgsns") pod "a993fcd3-a607-4987-80ff-887da63738db" (UID: "a993fcd3-a607-4987-80ff-887da63738db"). InnerVolumeSpecName "kube-api-access-dgsns". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.307083 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a993fcd3-a607-4987-80ff-887da63738db-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a993fcd3-a607-4987-80ff-887da63738db" (UID: "a993fcd3-a607-4987-80ff-887da63738db"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.365248 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a993fcd3-a607-4987-80ff-887da63738db-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.365283 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dgsns\" (UniqueName: \"kubernetes.io/projected/a993fcd3-a607-4987-80ff-887da63738db-kube-api-access-dgsns\") on node \"crc\" DevicePath \"\"" Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.365292 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a993fcd3-a607-4987-80ff-887da63738db-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.694091 4961 generic.go:334] "Generic (PLEG): container finished" podID="a993fcd3-a607-4987-80ff-887da63738db" containerID="9a391016938890b656b7dd063e5cb7f83d3093e7bf057a74498b50e6d7312897" exitCode=0 Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.694143 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-42n6d" event={"ID":"a993fcd3-a607-4987-80ff-887da63738db","Type":"ContainerDied","Data":"9a391016938890b656b7dd063e5cb7f83d3093e7bf057a74498b50e6d7312897"} Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.694174 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-42n6d" event={"ID":"a993fcd3-a607-4987-80ff-887da63738db","Type":"ContainerDied","Data":"4369ea8c65169cd89a2a4d3cd7b7505d08f50cd9b4755d5839ec2e7f71ef3072"} Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.694197 4961 scope.go:117] "RemoveContainer" containerID="9a391016938890b656b7dd063e5cb7f83d3093e7bf057a74498b50e6d7312897" Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.694238 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-42n6d" Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.732078 4961 scope.go:117] "RemoveContainer" containerID="9eec8b16f8a23e8965c00c81725929d0a908ece9446fa824dfd12bc42f95f104" Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.752078 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-42n6d"] Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.775363 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-42n6d"] Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.789171 4961 scope.go:117] "RemoveContainer" containerID="684c853a329eb915a94b15157ff3a09d7b94a58b7eb5895754db0f9a231815de" Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.822598 4961 scope.go:117] "RemoveContainer" containerID="9a391016938890b656b7dd063e5cb7f83d3093e7bf057a74498b50e6d7312897" Dec 05 18:26:10 crc kubenswrapper[4961]: E1205 18:26:10.823288 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9a391016938890b656b7dd063e5cb7f83d3093e7bf057a74498b50e6d7312897\": container with ID starting with 9a391016938890b656b7dd063e5cb7f83d3093e7bf057a74498b50e6d7312897 not found: ID does not exist" containerID="9a391016938890b656b7dd063e5cb7f83d3093e7bf057a74498b50e6d7312897" Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.823328 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9a391016938890b656b7dd063e5cb7f83d3093e7bf057a74498b50e6d7312897"} err="failed to get container status \"9a391016938890b656b7dd063e5cb7f83d3093e7bf057a74498b50e6d7312897\": rpc error: code = NotFound desc = could not find container \"9a391016938890b656b7dd063e5cb7f83d3093e7bf057a74498b50e6d7312897\": container with ID starting with 9a391016938890b656b7dd063e5cb7f83d3093e7bf057a74498b50e6d7312897 not found: ID does not exist" Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.823356 4961 scope.go:117] "RemoveContainer" containerID="9eec8b16f8a23e8965c00c81725929d0a908ece9446fa824dfd12bc42f95f104" Dec 05 18:26:10 crc kubenswrapper[4961]: E1205 18:26:10.823921 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9eec8b16f8a23e8965c00c81725929d0a908ece9446fa824dfd12bc42f95f104\": container with ID starting with 9eec8b16f8a23e8965c00c81725929d0a908ece9446fa824dfd12bc42f95f104 not found: ID does not exist" containerID="9eec8b16f8a23e8965c00c81725929d0a908ece9446fa824dfd12bc42f95f104" Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.823941 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9eec8b16f8a23e8965c00c81725929d0a908ece9446fa824dfd12bc42f95f104"} err="failed to get container status \"9eec8b16f8a23e8965c00c81725929d0a908ece9446fa824dfd12bc42f95f104\": rpc error: code = NotFound desc = could not find container \"9eec8b16f8a23e8965c00c81725929d0a908ece9446fa824dfd12bc42f95f104\": container with ID starting with 9eec8b16f8a23e8965c00c81725929d0a908ece9446fa824dfd12bc42f95f104 not found: ID does not exist" Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.823954 4961 scope.go:117] "RemoveContainer" containerID="684c853a329eb915a94b15157ff3a09d7b94a58b7eb5895754db0f9a231815de" Dec 05 18:26:10 crc kubenswrapper[4961]: E1205 18:26:10.824489 4961 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"684c853a329eb915a94b15157ff3a09d7b94a58b7eb5895754db0f9a231815de\": container with ID starting with 684c853a329eb915a94b15157ff3a09d7b94a58b7eb5895754db0f9a231815de not found: ID does not exist" containerID="684c853a329eb915a94b15157ff3a09d7b94a58b7eb5895754db0f9a231815de" Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.824528 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"684c853a329eb915a94b15157ff3a09d7b94a58b7eb5895754db0f9a231815de"} err="failed to get container status \"684c853a329eb915a94b15157ff3a09d7b94a58b7eb5895754db0f9a231815de\": rpc error: code = NotFound desc = could not find container \"684c853a329eb915a94b15157ff3a09d7b94a58b7eb5895754db0f9a231815de\": container with ID starting with 684c853a329eb915a94b15157ff3a09d7b94a58b7eb5895754db0f9a231815de not found: ID does not exist" Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.864954 4961 scope.go:117] "RemoveContainer" containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf" Dec 05 18:26:10 crc kubenswrapper[4961]: E1205 18:26:10.865402 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:26:10 crc kubenswrapper[4961]: I1205 18:26:10.883073 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a993fcd3-a607-4987-80ff-887da63738db" path="/var/lib/kubelet/pods/a993fcd3-a607-4987-80ff-887da63738db/volumes" Dec 05 18:26:15 crc kubenswrapper[4961]: I1205 18:26:15.825073 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-59qsl" Dec 05 18:26:15 crc kubenswrapper[4961]: I1205 18:26:15.887540 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-59qsl" Dec 05 18:26:16 crc kubenswrapper[4961]: I1205 18:26:16.066583 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-59qsl"] Dec 05 18:26:17 crc kubenswrapper[4961]: I1205 18:26:17.764687 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-59qsl" podUID="cea6a107-2ced-4158-9446-ab66c0b5452f" containerName="registry-server" containerID="cri-o://ea444d63e42f4c8086ca528430aee71503cc0763059150c92bd79fa24c455097" gracePeriod=2 Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.223501 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-59qsl" Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.319760 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cea6a107-2ced-4158-9446-ab66c0b5452f-utilities\") pod \"cea6a107-2ced-4158-9446-ab66c0b5452f\" (UID: \"cea6a107-2ced-4158-9446-ab66c0b5452f\") " Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.319885 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rvfwl\" (UniqueName: \"kubernetes.io/projected/cea6a107-2ced-4158-9446-ab66c0b5452f-kube-api-access-rvfwl\") pod \"cea6a107-2ced-4158-9446-ab66c0b5452f\" (UID: \"cea6a107-2ced-4158-9446-ab66c0b5452f\") " Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.319935 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cea6a107-2ced-4158-9446-ab66c0b5452f-catalog-content\") pod \"cea6a107-2ced-4158-9446-ab66c0b5452f\" (UID: \"cea6a107-2ced-4158-9446-ab66c0b5452f\") " Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.320767 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cea6a107-2ced-4158-9446-ab66c0b5452f-utilities" (OuterVolumeSpecName: "utilities") pod "cea6a107-2ced-4158-9446-ab66c0b5452f" (UID: "cea6a107-2ced-4158-9446-ab66c0b5452f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.326227 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cea6a107-2ced-4158-9446-ab66c0b5452f-kube-api-access-rvfwl" (OuterVolumeSpecName: "kube-api-access-rvfwl") pod "cea6a107-2ced-4158-9446-ab66c0b5452f" (UID: "cea6a107-2ced-4158-9446-ab66c0b5452f"). InnerVolumeSpecName "kube-api-access-rvfwl". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.381959 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cea6a107-2ced-4158-9446-ab66c0b5452f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cea6a107-2ced-4158-9446-ab66c0b5452f" (UID: "cea6a107-2ced-4158-9446-ab66c0b5452f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.422385 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cea6a107-2ced-4158-9446-ab66c0b5452f-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.422420 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rvfwl\" (UniqueName: \"kubernetes.io/projected/cea6a107-2ced-4158-9446-ab66c0b5452f-kube-api-access-rvfwl\") on node \"crc\" DevicePath \"\"" Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.422431 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cea6a107-2ced-4158-9446-ab66c0b5452f-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.424931 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gf74b" Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.468419 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gf74b" Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.773901 4961 generic.go:334] "Generic (PLEG): container finished" podID="cea6a107-2ced-4158-9446-ab66c0b5452f" containerID="ea444d63e42f4c8086ca528430aee71503cc0763059150c92bd79fa24c455097" exitCode=0 Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.773964 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-59qsl" Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.773977 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-59qsl" event={"ID":"cea6a107-2ced-4158-9446-ab66c0b5452f","Type":"ContainerDied","Data":"ea444d63e42f4c8086ca528430aee71503cc0763059150c92bd79fa24c455097"} Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.774032 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-59qsl" event={"ID":"cea6a107-2ced-4158-9446-ab66c0b5452f","Type":"ContainerDied","Data":"8c89f58eeddc38981e127e2bb997ba12eab30c3bb891d6168d7e792f206ad3fb"} Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.774049 4961 scope.go:117] "RemoveContainer" containerID="ea444d63e42f4c8086ca528430aee71503cc0763059150c92bd79fa24c455097" Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.799655 4961 scope.go:117] "RemoveContainer" containerID="bd2abdcd5ebdba445c1dc61b037a1ebae7b895ce421a521b3bf5fead3449e48d" Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.817097 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-59qsl"] Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.828084 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-59qsl"] Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.838211 4961 scope.go:117] "RemoveContainer" containerID="b98924c9cfb6a451d0e2b3141f884fc1ef96c49e822b0b4cfda6410a29dda9e8" Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.877497 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cea6a107-2ced-4158-9446-ab66c0b5452f" path="/var/lib/kubelet/pods/cea6a107-2ced-4158-9446-ab66c0b5452f/volumes" Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.907309 4961 scope.go:117] 
"RemoveContainer" containerID="ea444d63e42f4c8086ca528430aee71503cc0763059150c92bd79fa24c455097" Dec 05 18:26:18 crc kubenswrapper[4961]: E1205 18:26:18.907798 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ea444d63e42f4c8086ca528430aee71503cc0763059150c92bd79fa24c455097\": container with ID starting with ea444d63e42f4c8086ca528430aee71503cc0763059150c92bd79fa24c455097 not found: ID does not exist" containerID="ea444d63e42f4c8086ca528430aee71503cc0763059150c92bd79fa24c455097" Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.907835 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ea444d63e42f4c8086ca528430aee71503cc0763059150c92bd79fa24c455097"} err="failed to get container status \"ea444d63e42f4c8086ca528430aee71503cc0763059150c92bd79fa24c455097\": rpc error: code = NotFound desc = could not find container \"ea444d63e42f4c8086ca528430aee71503cc0763059150c92bd79fa24c455097\": container with ID starting with ea444d63e42f4c8086ca528430aee71503cc0763059150c92bd79fa24c455097 not found: ID does not exist" Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.907856 4961 scope.go:117] "RemoveContainer" containerID="bd2abdcd5ebdba445c1dc61b037a1ebae7b895ce421a521b3bf5fead3449e48d" Dec 05 18:26:18 crc kubenswrapper[4961]: E1205 18:26:18.908240 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd2abdcd5ebdba445c1dc61b037a1ebae7b895ce421a521b3bf5fead3449e48d\": container with ID starting with bd2abdcd5ebdba445c1dc61b037a1ebae7b895ce421a521b3bf5fead3449e48d not found: ID does not exist" containerID="bd2abdcd5ebdba445c1dc61b037a1ebae7b895ce421a521b3bf5fead3449e48d" Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.908264 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd2abdcd5ebdba445c1dc61b037a1ebae7b895ce421a521b3bf5fead3449e48d"} err="failed to get container status \"bd2abdcd5ebdba445c1dc61b037a1ebae7b895ce421a521b3bf5fead3449e48d\": rpc error: code = NotFound desc = could not find container \"bd2abdcd5ebdba445c1dc61b037a1ebae7b895ce421a521b3bf5fead3449e48d\": container with ID starting with bd2abdcd5ebdba445c1dc61b037a1ebae7b895ce421a521b3bf5fead3449e48d not found: ID does not exist" Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.908277 4961 scope.go:117] "RemoveContainer" containerID="b98924c9cfb6a451d0e2b3141f884fc1ef96c49e822b0b4cfda6410a29dda9e8" Dec 05 18:26:18 crc kubenswrapper[4961]: E1205 18:26:18.908541 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b98924c9cfb6a451d0e2b3141f884fc1ef96c49e822b0b4cfda6410a29dda9e8\": container with ID starting with b98924c9cfb6a451d0e2b3141f884fc1ef96c49e822b0b4cfda6410a29dda9e8 not found: ID does not exist" containerID="b98924c9cfb6a451d0e2b3141f884fc1ef96c49e822b0b4cfda6410a29dda9e8" Dec 05 18:26:18 crc kubenswrapper[4961]: I1205 18:26:18.908591 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b98924c9cfb6a451d0e2b3141f884fc1ef96c49e822b0b4cfda6410a29dda9e8"} err="failed to get container status \"b98924c9cfb6a451d0e2b3141f884fc1ef96c49e822b0b4cfda6410a29dda9e8\": rpc error: code = NotFound desc = could not find container \"b98924c9cfb6a451d0e2b3141f884fc1ef96c49e822b0b4cfda6410a29dda9e8\": container with ID starting with 
b98924c9cfb6a451d0e2b3141f884fc1ef96c49e822b0b4cfda6410a29dda9e8 not found: ID does not exist" Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.071063 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gf74b"] Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.071750 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gf74b" podUID="64428795-e493-4f34-9e8c-78b07e430963" containerName="registry-server" containerID="cri-o://d6fcfbace49efc5eada4219174d3baa81324ab5e3fe4c658994277b7a30ea11e" gracePeriod=2 Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.568024 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gf74b" Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.673358 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xqnhl\" (UniqueName: \"kubernetes.io/projected/64428795-e493-4f34-9e8c-78b07e430963-kube-api-access-xqnhl\") pod \"64428795-e493-4f34-9e8c-78b07e430963\" (UID: \"64428795-e493-4f34-9e8c-78b07e430963\") " Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.674470 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64428795-e493-4f34-9e8c-78b07e430963-utilities\") pod \"64428795-e493-4f34-9e8c-78b07e430963\" (UID: \"64428795-e493-4f34-9e8c-78b07e430963\") " Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.674547 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64428795-e493-4f34-9e8c-78b07e430963-catalog-content\") pod \"64428795-e493-4f34-9e8c-78b07e430963\" (UID: \"64428795-e493-4f34-9e8c-78b07e430963\") " Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.675230 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64428795-e493-4f34-9e8c-78b07e430963-utilities" (OuterVolumeSpecName: "utilities") pod "64428795-e493-4f34-9e8c-78b07e430963" (UID: "64428795-e493-4f34-9e8c-78b07e430963"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.677300 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/64428795-e493-4f34-9e8c-78b07e430963-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.679902 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/64428795-e493-4f34-9e8c-78b07e430963-kube-api-access-xqnhl" (OuterVolumeSpecName: "kube-api-access-xqnhl") pod "64428795-e493-4f34-9e8c-78b07e430963" (UID: "64428795-e493-4f34-9e8c-78b07e430963"). InnerVolumeSpecName "kube-api-access-xqnhl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.779097 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xqnhl\" (UniqueName: \"kubernetes.io/projected/64428795-e493-4f34-9e8c-78b07e430963-kube-api-access-xqnhl\") on node \"crc\" DevicePath \"\"" Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.809654 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/64428795-e493-4f34-9e8c-78b07e430963-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "64428795-e493-4f34-9e8c-78b07e430963" (UID: "64428795-e493-4f34-9e8c-78b07e430963"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.810495 4961 generic.go:334] "Generic (PLEG): container finished" podID="64428795-e493-4f34-9e8c-78b07e430963" containerID="d6fcfbace49efc5eada4219174d3baa81324ab5e3fe4c658994277b7a30ea11e" exitCode=0 Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.810537 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gf74b" event={"ID":"64428795-e493-4f34-9e8c-78b07e430963","Type":"ContainerDied","Data":"d6fcfbace49efc5eada4219174d3baa81324ab5e3fe4c658994277b7a30ea11e"} Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.810563 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gf74b" event={"ID":"64428795-e493-4f34-9e8c-78b07e430963","Type":"ContainerDied","Data":"465604dbc31be79a3933bac2cd786d74373fb997511187f7992496e1683f48c9"} Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.810579 4961 scope.go:117] "RemoveContainer" containerID="d6fcfbace49efc5eada4219174d3baa81324ab5e3fe4c658994277b7a30ea11e" Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.810585 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gf74b" Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.833974 4961 scope.go:117] "RemoveContainer" containerID="84cb4798047f4a02f291358e68e460e2022a7f2ef7edfaac072dae9b05d82cd3" Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.854898 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gf74b"] Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.862742 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gf74b"] Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.876109 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="64428795-e493-4f34-9e8c-78b07e430963" path="/var/lib/kubelet/pods/64428795-e493-4f34-9e8c-78b07e430963/volumes" Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.880892 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/64428795-e493-4f34-9e8c-78b07e430963-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.887707 4961 scope.go:117] "RemoveContainer" containerID="3b8502e309be13d170c06ff001482f585966f73ec89d889ac556901289232d47" Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.926074 4961 scope.go:117] "RemoveContainer" containerID="d6fcfbace49efc5eada4219174d3baa81324ab5e3fe4c658994277b7a30ea11e" Dec 05 18:26:20 crc kubenswrapper[4961]: E1205 18:26:20.926724 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d6fcfbace49efc5eada4219174d3baa81324ab5e3fe4c658994277b7a30ea11e\": container with ID starting with d6fcfbace49efc5eada4219174d3baa81324ab5e3fe4c658994277b7a30ea11e not found: ID does not exist" containerID="d6fcfbace49efc5eada4219174d3baa81324ab5e3fe4c658994277b7a30ea11e" Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.926831 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d6fcfbace49efc5eada4219174d3baa81324ab5e3fe4c658994277b7a30ea11e"} err="failed to get container status \"d6fcfbace49efc5eada4219174d3baa81324ab5e3fe4c658994277b7a30ea11e\": rpc error: code = NotFound desc = could not find container \"d6fcfbace49efc5eada4219174d3baa81324ab5e3fe4c658994277b7a30ea11e\": container with ID starting with d6fcfbace49efc5eada4219174d3baa81324ab5e3fe4c658994277b7a30ea11e not found: ID does not exist" Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.926864 4961 scope.go:117] "RemoveContainer" containerID="84cb4798047f4a02f291358e68e460e2022a7f2ef7edfaac072dae9b05d82cd3" Dec 05 18:26:20 crc kubenswrapper[4961]: E1205 18:26:20.927531 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84cb4798047f4a02f291358e68e460e2022a7f2ef7edfaac072dae9b05d82cd3\": container with ID starting with 84cb4798047f4a02f291358e68e460e2022a7f2ef7edfaac072dae9b05d82cd3 not found: ID does not exist" containerID="84cb4798047f4a02f291358e68e460e2022a7f2ef7edfaac072dae9b05d82cd3" Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.927575 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"84cb4798047f4a02f291358e68e460e2022a7f2ef7edfaac072dae9b05d82cd3"} err="failed to get container status \"84cb4798047f4a02f291358e68e460e2022a7f2ef7edfaac072dae9b05d82cd3\": rpc error: code = NotFound desc = could 
not find container \"84cb4798047f4a02f291358e68e460e2022a7f2ef7edfaac072dae9b05d82cd3\": container with ID starting with 84cb4798047f4a02f291358e68e460e2022a7f2ef7edfaac072dae9b05d82cd3 not found: ID does not exist" Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.927605 4961 scope.go:117] "RemoveContainer" containerID="3b8502e309be13d170c06ff001482f585966f73ec89d889ac556901289232d47" Dec 05 18:26:20 crc kubenswrapper[4961]: E1205 18:26:20.928085 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3b8502e309be13d170c06ff001482f585966f73ec89d889ac556901289232d47\": container with ID starting with 3b8502e309be13d170c06ff001482f585966f73ec89d889ac556901289232d47 not found: ID does not exist" containerID="3b8502e309be13d170c06ff001482f585966f73ec89d889ac556901289232d47" Dec 05 18:26:20 crc kubenswrapper[4961]: I1205 18:26:20.928111 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3b8502e309be13d170c06ff001482f585966f73ec89d889ac556901289232d47"} err="failed to get container status \"3b8502e309be13d170c06ff001482f585966f73ec89d889ac556901289232d47\": rpc error: code = NotFound desc = could not find container \"3b8502e309be13d170c06ff001482f585966f73ec89d889ac556901289232d47\": container with ID starting with 3b8502e309be13d170c06ff001482f585966f73ec89d889ac556901289232d47 not found: ID does not exist" Dec 05 18:26:24 crc kubenswrapper[4961]: I1205 18:26:24.871767 4961 scope.go:117] "RemoveContainer" containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf" Dec 05 18:26:24 crc kubenswrapper[4961]: E1205 18:26:24.872710 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:26:37 crc kubenswrapper[4961]: I1205 18:26:37.863378 4961 scope.go:117] "RemoveContainer" containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf" Dec 05 18:26:37 crc kubenswrapper[4961]: E1205 18:26:37.864111 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:26:50 crc kubenswrapper[4961]: I1205 18:26:50.863822 4961 scope.go:117] "RemoveContainer" containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf" Dec 05 18:26:50 crc kubenswrapper[4961]: E1205 18:26:50.864566 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:27:05 crc kubenswrapper[4961]: I1205 18:27:05.863969 4961 scope.go:117] 
"RemoveContainer" containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf" Dec 05 18:27:05 crc kubenswrapper[4961]: E1205 18:27:05.864796 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:27:18 crc kubenswrapper[4961]: I1205 18:27:18.864072 4961 scope.go:117] "RemoveContainer" containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf" Dec 05 18:27:18 crc kubenswrapper[4961]: E1205 18:27:18.865634 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:27:30 crc kubenswrapper[4961]: I1205 18:27:30.864534 4961 scope.go:117] "RemoveContainer" containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf" Dec 05 18:27:31 crc kubenswrapper[4961]: I1205 18:27:31.517842 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerStarted","Data":"0b84cec64ffa522721ce8551ebbea613d7a1ad484230eb497756106e0f4ee577"} Dec 05 18:29:57 crc kubenswrapper[4961]: I1205 18:29:57.246292 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 18:29:57 crc kubenswrapper[4961]: I1205 18:29:57.247247 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.164418 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415990-ghbdz"] Dec 05 18:30:00 crc kubenswrapper[4961]: E1205 18:30:00.165629 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64428795-e493-4f34-9e8c-78b07e430963" containerName="extract-utilities" Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.165650 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="64428795-e493-4f34-9e8c-78b07e430963" containerName="extract-utilities" Dec 05 18:30:00 crc kubenswrapper[4961]: E1205 18:30:00.165673 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a993fcd3-a607-4987-80ff-887da63738db" containerName="extract-utilities" Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.165679 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="a993fcd3-a607-4987-80ff-887da63738db" containerName="extract-utilities" Dec 05 18:30:00 crc 
kubenswrapper[4961]: E1205 18:30:00.165690 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cea6a107-2ced-4158-9446-ab66c0b5452f" containerName="registry-server"
Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.165697 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="cea6a107-2ced-4158-9446-ab66c0b5452f" containerName="registry-server"
Dec 05 18:30:00 crc kubenswrapper[4961]: E1205 18:30:00.165707 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64428795-e493-4f34-9e8c-78b07e430963" containerName="registry-server"
Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.165715 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="64428795-e493-4f34-9e8c-78b07e430963" containerName="registry-server"
Dec 05 18:30:00 crc kubenswrapper[4961]: E1205 18:30:00.165726 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cea6a107-2ced-4158-9446-ab66c0b5452f" containerName="extract-utilities"
Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.165731 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="cea6a107-2ced-4158-9446-ab66c0b5452f" containerName="extract-utilities"
Dec 05 18:30:00 crc kubenswrapper[4961]: E1205 18:30:00.165743 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="64428795-e493-4f34-9e8c-78b07e430963" containerName="extract-content"
Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.165748 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="64428795-e493-4f34-9e8c-78b07e430963" containerName="extract-content"
Dec 05 18:30:00 crc kubenswrapper[4961]: E1205 18:30:00.165757 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a993fcd3-a607-4987-80ff-887da63738db" containerName="registry-server"
Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.165763 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="a993fcd3-a607-4987-80ff-887da63738db" containerName="registry-server"
Dec 05 18:30:00 crc kubenswrapper[4961]: E1205 18:30:00.165792 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cea6a107-2ced-4158-9446-ab66c0b5452f" containerName="extract-content"
Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.165798 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="cea6a107-2ced-4158-9446-ab66c0b5452f" containerName="extract-content"
Dec 05 18:30:00 crc kubenswrapper[4961]: E1205 18:30:00.165824 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a993fcd3-a607-4987-80ff-887da63738db" containerName="extract-content"
Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.165829 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="a993fcd3-a607-4987-80ff-887da63738db" containerName="extract-content"
Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.166030 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="cea6a107-2ced-4158-9446-ab66c0b5452f" containerName="registry-server"
Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.166048 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="a993fcd3-a607-4987-80ff-887da63738db" containerName="registry-server"
Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.166064 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="64428795-e493-4f34-9e8c-78b07e430963" containerName="registry-server"
Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.166819 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-ghbdz"
Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.169150 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.169583 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.217172 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415990-ghbdz"]
Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.333170 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bd6133ff-8872-404e-8720-7022bc858d60-config-volume\") pod \"collect-profiles-29415990-ghbdz\" (UID: \"bd6133ff-8872-404e-8720-7022bc858d60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-ghbdz"
Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.333303 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bd6133ff-8872-404e-8720-7022bc858d60-secret-volume\") pod \"collect-profiles-29415990-ghbdz\" (UID: \"bd6133ff-8872-404e-8720-7022bc858d60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-ghbdz"
Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.333588 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cqv5h\" (UniqueName: \"kubernetes.io/projected/bd6133ff-8872-404e-8720-7022bc858d60-kube-api-access-cqv5h\") pod \"collect-profiles-29415990-ghbdz\" (UID: \"bd6133ff-8872-404e-8720-7022bc858d60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-ghbdz"
Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.435477 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bd6133ff-8872-404e-8720-7022bc858d60-secret-volume\") pod \"collect-profiles-29415990-ghbdz\" (UID: \"bd6133ff-8872-404e-8720-7022bc858d60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-ghbdz"
Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.435582 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqv5h\" (UniqueName: \"kubernetes.io/projected/bd6133ff-8872-404e-8720-7022bc858d60-kube-api-access-cqv5h\") pod \"collect-profiles-29415990-ghbdz\" (UID: \"bd6133ff-8872-404e-8720-7022bc858d60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-ghbdz"
Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.435645 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bd6133ff-8872-404e-8720-7022bc858d60-config-volume\") pod \"collect-profiles-29415990-ghbdz\" (UID: \"bd6133ff-8872-404e-8720-7022bc858d60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-ghbdz"
Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.436538 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bd6133ff-8872-404e-8720-7022bc858d60-config-volume\") pod \"collect-profiles-29415990-ghbdz\" (UID: \"bd6133ff-8872-404e-8720-7022bc858d60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-ghbdz"
Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.442408 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bd6133ff-8872-404e-8720-7022bc858d60-secret-volume\") pod \"collect-profiles-29415990-ghbdz\" (UID: \"bd6133ff-8872-404e-8720-7022bc858d60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-ghbdz"
Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.463515 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqv5h\" (UniqueName: \"kubernetes.io/projected/bd6133ff-8872-404e-8720-7022bc858d60-kube-api-access-cqv5h\") pod \"collect-profiles-29415990-ghbdz\" (UID: \"bd6133ff-8872-404e-8720-7022bc858d60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-ghbdz"
Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.519182 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-ghbdz"
Dec 05 18:30:00 crc kubenswrapper[4961]: I1205 18:30:00.989227 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415990-ghbdz"]
Dec 05 18:30:00 crc kubenswrapper[4961]: W1205 18:30:00.993410 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbd6133ff_8872_404e_8720_7022bc858d60.slice/crio-6e4ca83531c97dfc5cb1530e30a1ca754ea6e34cb0f9b91b77a153ec11c1997e WatchSource:0}: Error finding container 6e4ca83531c97dfc5cb1530e30a1ca754ea6e34cb0f9b91b77a153ec11c1997e: Status 404 returned error can't find the container with id 6e4ca83531c97dfc5cb1530e30a1ca754ea6e34cb0f9b91b77a153ec11c1997e
Dec 05 18:30:01 crc kubenswrapper[4961]: I1205 18:30:01.013432 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-ghbdz" event={"ID":"bd6133ff-8872-404e-8720-7022bc858d60","Type":"ContainerStarted","Data":"6e4ca83531c97dfc5cb1530e30a1ca754ea6e34cb0f9b91b77a153ec11c1997e"}
Dec 05 18:30:02 crc kubenswrapper[4961]: I1205 18:30:02.036691 4961 generic.go:334] "Generic (PLEG): container finished" podID="bd6133ff-8872-404e-8720-7022bc858d60" containerID="1cf50934cae4ab6aed16774b989ddfafb769e8a6b560e0e386f413169813a892" exitCode=0
Dec 05 18:30:02 crc kubenswrapper[4961]: I1205 18:30:02.036883 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-ghbdz" event={"ID":"bd6133ff-8872-404e-8720-7022bc858d60","Type":"ContainerDied","Data":"1cf50934cae4ab6aed16774b989ddfafb769e8a6b560e0e386f413169813a892"}
Dec 05 18:30:03 crc kubenswrapper[4961]: I1205 18:30:03.506283 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-ghbdz"
Dec 05 18:30:03 crc kubenswrapper[4961]: I1205 18:30:03.703828 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bd6133ff-8872-404e-8720-7022bc858d60-config-volume\") pod \"bd6133ff-8872-404e-8720-7022bc858d60\" (UID: \"bd6133ff-8872-404e-8720-7022bc858d60\") "
Dec 05 18:30:03 crc kubenswrapper[4961]: I1205 18:30:03.703935 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cqv5h\" (UniqueName: \"kubernetes.io/projected/bd6133ff-8872-404e-8720-7022bc858d60-kube-api-access-cqv5h\") pod \"bd6133ff-8872-404e-8720-7022bc858d60\" (UID: \"bd6133ff-8872-404e-8720-7022bc858d60\") "
Dec 05 18:30:03 crc kubenswrapper[4961]: I1205 18:30:03.703966 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bd6133ff-8872-404e-8720-7022bc858d60-secret-volume\") pod \"bd6133ff-8872-404e-8720-7022bc858d60\" (UID: \"bd6133ff-8872-404e-8720-7022bc858d60\") "
Dec 05 18:30:03 crc kubenswrapper[4961]: I1205 18:30:03.705397 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bd6133ff-8872-404e-8720-7022bc858d60-config-volume" (OuterVolumeSpecName: "config-volume") pod "bd6133ff-8872-404e-8720-7022bc858d60" (UID: "bd6133ff-8872-404e-8720-7022bc858d60"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 18:30:03 crc kubenswrapper[4961]: I1205 18:30:03.710208 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bd6133ff-8872-404e-8720-7022bc858d60-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "bd6133ff-8872-404e-8720-7022bc858d60" (UID: "bd6133ff-8872-404e-8720-7022bc858d60"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:30:03 crc kubenswrapper[4961]: I1205 18:30:03.710673 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd6133ff-8872-404e-8720-7022bc858d60-kube-api-access-cqv5h" (OuterVolumeSpecName: "kube-api-access-cqv5h") pod "bd6133ff-8872-404e-8720-7022bc858d60" (UID: "bd6133ff-8872-404e-8720-7022bc858d60"). InnerVolumeSpecName "kube-api-access-cqv5h". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 18:30:03 crc kubenswrapper[4961]: I1205 18:30:03.806419 4961 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/bd6133ff-8872-404e-8720-7022bc858d60-config-volume\") on node \"crc\" DevicePath \"\""
Dec 05 18:30:03 crc kubenswrapper[4961]: I1205 18:30:03.806457 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cqv5h\" (UniqueName: \"kubernetes.io/projected/bd6133ff-8872-404e-8720-7022bc858d60-kube-api-access-cqv5h\") on node \"crc\" DevicePath \"\""
Dec 05 18:30:03 crc kubenswrapper[4961]: I1205 18:30:03.806471 4961 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/bd6133ff-8872-404e-8720-7022bc858d60-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 05 18:30:04 crc kubenswrapper[4961]: I1205 18:30:04.057496 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-ghbdz" event={"ID":"bd6133ff-8872-404e-8720-7022bc858d60","Type":"ContainerDied","Data":"6e4ca83531c97dfc5cb1530e30a1ca754ea6e34cb0f9b91b77a153ec11c1997e"}
Dec 05 18:30:04 crc kubenswrapper[4961]: I1205 18:30:04.057963 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="6e4ca83531c97dfc5cb1530e30a1ca754ea6e34cb0f9b91b77a153ec11c1997e"
Dec 05 18:30:04 crc kubenswrapper[4961]: I1205 18:30:04.057582 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29415990-ghbdz"
Dec 05 18:30:04 crc kubenswrapper[4961]: I1205 18:30:04.593552 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415945-24bgf"]
Dec 05 18:30:04 crc kubenswrapper[4961]: I1205 18:30:04.600771 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415945-24bgf"]
Dec 05 18:30:04 crc kubenswrapper[4961]: I1205 18:30:04.881059 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b55278a7-611a-48ff-bf15-07270f3614ed" path="/var/lib/kubelet/pods/b55278a7-611a-48ff-bf15-07270f3614ed/volumes"
Dec 05 18:30:27 crc kubenswrapper[4961]: I1205 18:30:27.245546 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 18:30:27 crc kubenswrapper[4961]: I1205 18:30:27.246133 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 18:30:35 crc kubenswrapper[4961]: I1205 18:30:35.362617 4961 generic.go:334] "Generic (PLEG): container finished" podID="61480a22-be11-4fc5-83fa-8736dec80842" containerID="0550bc109d1cac8af881b6603902abe9bd2deeae7499fdd53482aa2b546845a1" exitCode=0
Dec 05 18:30:35 crc kubenswrapper[4961]: I1205 18:30:35.362708 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"61480a22-be11-4fc5-83fa-8736dec80842","Type":"ContainerDied","Data":"0550bc109d1cac8af881b6603902abe9bd2deeae7499fdd53482aa2b546845a1"}
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.815403 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.820281 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-logs\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"61480a22-be11-4fc5-83fa-8736dec80842\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") "
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.820440 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/61480a22-be11-4fc5-83fa-8736dec80842-openstack-config\") pod \"61480a22-be11-4fc5-83fa-8736dec80842\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") "
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.820528 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/61480a22-be11-4fc5-83fa-8736dec80842-openstack-config-secret\") pod \"61480a22-be11-4fc5-83fa-8736dec80842\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") "
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.820659 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/61480a22-be11-4fc5-83fa-8736dec80842-test-operator-ephemeral-workdir\") pod \"61480a22-be11-4fc5-83fa-8736dec80842\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") "
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.820810 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/61480a22-be11-4fc5-83fa-8736dec80842-test-operator-ephemeral-temporary\") pod \"61480a22-be11-4fc5-83fa-8736dec80842\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") "
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.820930 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61480a22-be11-4fc5-83fa-8736dec80842-ssh-key\") pod \"61480a22-be11-4fc5-83fa-8736dec80842\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") "
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.821074 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/61480a22-be11-4fc5-83fa-8736dec80842-config-data\") pod \"61480a22-be11-4fc5-83fa-8736dec80842\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") "
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.821250 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/61480a22-be11-4fc5-83fa-8736dec80842-ca-certs\") pod \"61480a22-be11-4fc5-83fa-8736dec80842\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") "
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.821395 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7crlt\" (UniqueName: \"kubernetes.io/projected/61480a22-be11-4fc5-83fa-8736dec80842-kube-api-access-7crlt\") pod \"61480a22-be11-4fc5-83fa-8736dec80842\" (UID: \"61480a22-be11-4fc5-83fa-8736dec80842\") "
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.821512 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61480a22-be11-4fc5-83fa-8736dec80842-test-operator-ephemeral-temporary" (OuterVolumeSpecName: "test-operator-ephemeral-temporary") pod "61480a22-be11-4fc5-83fa-8736dec80842" (UID: "61480a22-be11-4fc5-83fa-8736dec80842"). InnerVolumeSpecName "test-operator-ephemeral-temporary". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.821677 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61480a22-be11-4fc5-83fa-8736dec80842-config-data" (OuterVolumeSpecName: "config-data") pod "61480a22-be11-4fc5-83fa-8736dec80842" (UID: "61480a22-be11-4fc5-83fa-8736dec80842"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.822060 4961 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/61480a22-be11-4fc5-83fa-8736dec80842-config-data\") on node \"crc\" DevicePath \"\""
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.822153 4961 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-temporary\" (UniqueName: \"kubernetes.io/empty-dir/61480a22-be11-4fc5-83fa-8736dec80842-test-operator-ephemeral-temporary\") on node \"crc\" DevicePath \"\""
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.828130 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/61480a22-be11-4fc5-83fa-8736dec80842-test-operator-ephemeral-workdir" (OuterVolumeSpecName: "test-operator-ephemeral-workdir") pod "61480a22-be11-4fc5-83fa-8736dec80842" (UID: "61480a22-be11-4fc5-83fa-8736dec80842"). InnerVolumeSpecName "test-operator-ephemeral-workdir". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.840003 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/61480a22-be11-4fc5-83fa-8736dec80842-kube-api-access-7crlt" (OuterVolumeSpecName: "kube-api-access-7crlt") pod "61480a22-be11-4fc5-83fa-8736dec80842" (UID: "61480a22-be11-4fc5-83fa-8736dec80842"). InnerVolumeSpecName "kube-api-access-7crlt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.839941 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage08-crc" (OuterVolumeSpecName: "test-operator-logs") pod "61480a22-be11-4fc5-83fa-8736dec80842" (UID: "61480a22-be11-4fc5-83fa-8736dec80842"). InnerVolumeSpecName "local-storage08-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.859430 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61480a22-be11-4fc5-83fa-8736dec80842-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "61480a22-be11-4fc5-83fa-8736dec80842" (UID: "61480a22-be11-4fc5-83fa-8736dec80842"). InnerVolumeSpecName "openstack-config-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.861345 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61480a22-be11-4fc5-83fa-8736dec80842-ssh-key" (OuterVolumeSpecName: "ssh-key") pod "61480a22-be11-4fc5-83fa-8736dec80842" (UID: "61480a22-be11-4fc5-83fa-8736dec80842"). InnerVolumeSpecName "ssh-key". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.872676 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/61480a22-be11-4fc5-83fa-8736dec80842-ca-certs" (OuterVolumeSpecName: "ca-certs") pod "61480a22-be11-4fc5-83fa-8736dec80842" (UID: "61480a22-be11-4fc5-83fa-8736dec80842"). InnerVolumeSpecName "ca-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.903512 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/61480a22-be11-4fc5-83fa-8736dec80842-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "61480a22-be11-4fc5-83fa-8736dec80842" (UID: "61480a22-be11-4fc5-83fa-8736dec80842"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.923703 4961 reconciler_common.go:293] "Volume detached for volume \"ca-certs\" (UniqueName: \"kubernetes.io/secret/61480a22-be11-4fc5-83fa-8736dec80842-ca-certs\") on node \"crc\" DevicePath \"\""
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.923735 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7crlt\" (UniqueName: \"kubernetes.io/projected/61480a22-be11-4fc5-83fa-8736dec80842-kube-api-access-7crlt\") on node \"crc\" DevicePath \"\""
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.931213 4961 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" "
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.931416 4961 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/61480a22-be11-4fc5-83fa-8736dec80842-openstack-config\") on node \"crc\" DevicePath \"\""
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.931435 4961 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/61480a22-be11-4fc5-83fa-8736dec80842-openstack-config-secret\") on node \"crc\" DevicePath \"\""
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.931447 4961 reconciler_common.go:293] "Volume detached for volume \"test-operator-ephemeral-workdir\" (UniqueName: \"kubernetes.io/empty-dir/61480a22-be11-4fc5-83fa-8736dec80842-test-operator-ephemeral-workdir\") on node \"crc\" DevicePath \"\""
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.931462 4961 reconciler_common.go:293] "Volume detached for volume \"ssh-key\" (UniqueName: \"kubernetes.io/secret/61480a22-be11-4fc5-83fa-8736dec80842-ssh-key\") on node \"crc\" DevicePath \"\""
Dec 05 18:30:36 crc kubenswrapper[4961]: I1205 18:30:36.944240 4961 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage08-crc" (UniqueName: "kubernetes.io/local-volume/local-storage08-crc") on node "crc"
Dec 05 18:30:37 crc kubenswrapper[4961]: I1205 18:30:37.034058 4961 reconciler_common.go:293] "Volume detached for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") on node \"crc\" DevicePath \"\""
Dec 05 18:30:37 crc kubenswrapper[4961]: I1205 18:30:37.391496 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/tempest-tests-tempest" event={"ID":"61480a22-be11-4fc5-83fa-8736dec80842","Type":"ContainerDied","Data":"e67616128f81e7650da534d02bc1c2bed596efad116c3db74d2832d1ee7ca95d"}
Dec 05 18:30:37 crc kubenswrapper[4961]: I1205 18:30:37.391559 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e67616128f81e7650da534d02bc1c2bed596efad116c3db74d2832d1ee7ca95d"
Dec 05 18:30:37 crc kubenswrapper[4961]: I1205 18:30:37.391694 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack/tempest-tests-tempest"
Dec 05 18:30:45 crc kubenswrapper[4961]: I1205 18:30:45.251096 4961 scope.go:117] "RemoveContainer" containerID="26f55e6efde06fc09d4ab4da8729d7bfad995984a7933fbc8b315af12687863d"
Dec 05 18:30:48 crc kubenswrapper[4961]: I1205 18:30:48.085173 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Dec 05 18:30:48 crc kubenswrapper[4961]: E1205 18:30:48.086099 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="61480a22-be11-4fc5-83fa-8736dec80842" containerName="tempest-tests-tempest-tests-runner"
Dec 05 18:30:48 crc kubenswrapper[4961]: I1205 18:30:48.086115 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="61480a22-be11-4fc5-83fa-8736dec80842" containerName="tempest-tests-tempest-tests-runner"
Dec 05 18:30:48 crc kubenswrapper[4961]: E1205 18:30:48.086131 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd6133ff-8872-404e-8720-7022bc858d60" containerName="collect-profiles"
Dec 05 18:30:48 crc kubenswrapper[4961]: I1205 18:30:48.086137 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd6133ff-8872-404e-8720-7022bc858d60" containerName="collect-profiles"
Dec 05 18:30:48 crc kubenswrapper[4961]: I1205 18:30:48.086353 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd6133ff-8872-404e-8720-7022bc858d60" containerName="collect-profiles"
Dec 05 18:30:48 crc kubenswrapper[4961]: I1205 18:30:48.086365 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="61480a22-be11-4fc5-83fa-8736dec80842" containerName="tempest-tests-tempest-tests-runner"
Dec 05 18:30:48 crc kubenswrapper[4961]: I1205 18:30:48.087022 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 05 18:30:48 crc kubenswrapper[4961]: I1205 18:30:48.090021 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openstack"/"default-dockercfg-m9vdq"
Dec 05 18:30:48 crc kubenswrapper[4961]: I1205 18:30:48.099869 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Dec 05 18:30:48 crc kubenswrapper[4961]: I1205 18:30:48.154909 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"8be5fc0b-4a17-4763-a30c-3d2053d33f29\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 05 18:30:48 crc kubenswrapper[4961]: I1205 18:30:48.155269 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4jdtl\" (UniqueName: \"kubernetes.io/projected/8be5fc0b-4a17-4763-a30c-3d2053d33f29-kube-api-access-4jdtl\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"8be5fc0b-4a17-4763-a30c-3d2053d33f29\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 05 18:30:48 crc kubenswrapper[4961]: I1205 18:30:48.258033 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4jdtl\" (UniqueName: \"kubernetes.io/projected/8be5fc0b-4a17-4763-a30c-3d2053d33f29-kube-api-access-4jdtl\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"8be5fc0b-4a17-4763-a30c-3d2053d33f29\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 05 18:30:48 crc kubenswrapper[4961]: I1205 18:30:48.258305 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"8be5fc0b-4a17-4763-a30c-3d2053d33f29\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 05 18:30:48 crc kubenswrapper[4961]: I1205 18:30:48.258761 4961 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"8be5fc0b-4a17-4763-a30c-3d2053d33f29\") device mount path \"/mnt/openstack/pv08\"" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 05 18:30:48 crc kubenswrapper[4961]: I1205 18:30:48.282569 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4jdtl\" (UniqueName: \"kubernetes.io/projected/8be5fc0b-4a17-4763-a30c-3d2053d33f29-kube-api-access-4jdtl\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"8be5fc0b-4a17-4763-a30c-3d2053d33f29\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 05 18:30:48 crc kubenswrapper[4961]: I1205 18:30:48.285433 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage08-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage08-crc\") pod \"test-operator-logs-pod-tempest-tempest-tests-tempest\" (UID: \"8be5fc0b-4a17-4763-a30c-3d2053d33f29\") " pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 05 18:30:48 crc kubenswrapper[4961]: I1205 18:30:48.415174 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"
Dec 05 18:30:48 crc kubenswrapper[4961]: I1205 18:30:48.888139 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack/test-operator-logs-pod-tempest-tempest-tests-tempest"]
Dec 05 18:30:49 crc kubenswrapper[4961]: I1205 18:30:49.502875 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"8be5fc0b-4a17-4763-a30c-3d2053d33f29","Type":"ContainerStarted","Data":"0df18d2b1ac53c2f2bcb2e2f75455820ac6341c8dde79f5166c05e33833ab813"}
Dec 05 18:30:50 crc kubenswrapper[4961]: I1205 18:30:50.514319 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" event={"ID":"8be5fc0b-4a17-4763-a30c-3d2053d33f29","Type":"ContainerStarted","Data":"3aa594830ae258ec383d32b507874c80c65ca253d5357538f4837bfb605f64e6"}
Dec 05 18:30:50 crc kubenswrapper[4961]: I1205 18:30:50.529311 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack/test-operator-logs-pod-tempest-tempest-tests-tempest" podStartSLOduration=1.671658232 podStartE2EDuration="2.529291813s" podCreationTimestamp="2025-12-05 18:30:48 +0000 UTC" firstStartedPulling="2025-12-05 18:30:48.895399771 +0000 UTC m=+3454.956550244" lastFinishedPulling="2025-12-05 18:30:49.753033352 +0000 UTC m=+3455.814183825" observedRunningTime="2025-12-05 18:30:50.526272499 +0000 UTC m=+3456.587423022" watchObservedRunningTime="2025-12-05 18:30:50.529291813 +0000 UTC m=+3456.590442296"
Dec 05 18:30:57 crc kubenswrapper[4961]: I1205 18:30:57.246269 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 18:30:57 crc kubenswrapper[4961]: I1205 18:30:57.247995 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 18:30:57 crc kubenswrapper[4961]: I1205 18:30:57.248056 4961 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vc27"
Dec 05 18:30:57 crc kubenswrapper[4961]: I1205 18:30:57.248763 4961 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"0b84cec64ffa522721ce8551ebbea613d7a1ad484230eb497756106e0f4ee577"} pod="openshift-machine-config-operator/machine-config-daemon-4vc27" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 18:30:57 crc kubenswrapper[4961]: I1205 18:30:57.248837 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" containerID="cri-o://0b84cec64ffa522721ce8551ebbea613d7a1ad484230eb497756106e0f4ee577" gracePeriod=600
Dec 05 18:30:57 crc kubenswrapper[4961]: I1205 18:30:57.604212 4961 generic.go:334] "Generic (PLEG): container finished" podID="c048c267-061b-479b-9d63-b3aee093d9f6" containerID="0b84cec64ffa522721ce8551ebbea613d7a1ad484230eb497756106e0f4ee577" exitCode=0
Dec 05 18:30:57 crc kubenswrapper[4961]: I1205 18:30:57.604450 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerDied","Data":"0b84cec64ffa522721ce8551ebbea613d7a1ad484230eb497756106e0f4ee577"}
Dec 05 18:30:57 crc kubenswrapper[4961]: I1205 18:30:57.604724 4961 scope.go:117] "RemoveContainer" containerID="fd9000ce78464758fd7bbd54c1d7164a15b4e5977f73b091e72315741c27fdcf"
Dec 05 18:30:58 crc kubenswrapper[4961]: I1205 18:30:58.618558 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerStarted","Data":"74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828"}
Dec 05 18:31:12 crc kubenswrapper[4961]: I1205 18:31:12.924937 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-ccglg/must-gather-zzwvt"]
Dec 05 18:31:12 crc kubenswrapper[4961]: I1205 18:31:12.926940 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-ccglg/must-gather-zzwvt"
Dec 05 18:31:12 crc kubenswrapper[4961]: I1205 18:31:12.930625 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-ccglg"/"default-dockercfg-6bb59"
Dec 05 18:31:12 crc kubenswrapper[4961]: I1205 18:31:12.930635 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-ccglg"/"openshift-service-ca.crt"
Dec 05 18:31:12 crc kubenswrapper[4961]: I1205 18:31:12.930753 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-ccglg"/"kube-root-ca.crt"
Dec 05 18:31:12 crc kubenswrapper[4961]: I1205 18:31:12.948444 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-ccglg/must-gather-zzwvt"]
Dec 05 18:31:13 crc kubenswrapper[4961]: I1205 18:31:13.047617 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c-must-gather-output\") pod \"must-gather-zzwvt\" (UID: \"6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c\") " pod="openshift-must-gather-ccglg/must-gather-zzwvt"
Dec 05 18:31:13 crc kubenswrapper[4961]: I1205 18:31:13.047670 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s6hxz\" (UniqueName: \"kubernetes.io/projected/6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c-kube-api-access-s6hxz\") pod \"must-gather-zzwvt\" (UID: \"6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c\") " pod="openshift-must-gather-ccglg/must-gather-zzwvt"
Dec 05 18:31:13 crc kubenswrapper[4961]: I1205 18:31:13.149361 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s6hxz\" (UniqueName: \"kubernetes.io/projected/6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c-kube-api-access-s6hxz\") pod \"must-gather-zzwvt\" (UID: \"6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c\") " pod="openshift-must-gather-ccglg/must-gather-zzwvt"
Dec 05 18:31:13 crc kubenswrapper[4961]: I1205 18:31:13.149603 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c-must-gather-output\") pod \"must-gather-zzwvt\" (UID: \"6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c\") " pod="openshift-must-gather-ccglg/must-gather-zzwvt"
Dec 05 18:31:13 crc kubenswrapper[4961]: I1205 18:31:13.150086 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c-must-gather-output\") pod \"must-gather-zzwvt\" (UID: \"6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c\") " pod="openshift-must-gather-ccglg/must-gather-zzwvt"
Dec 05 18:31:13 crc kubenswrapper[4961]: I1205 18:31:13.173262 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s6hxz\" (UniqueName: \"kubernetes.io/projected/6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c-kube-api-access-s6hxz\") pod \"must-gather-zzwvt\" (UID: \"6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c\") " pod="openshift-must-gather-ccglg/must-gather-zzwvt"
Dec 05 18:31:13 crc kubenswrapper[4961]: I1205 18:31:13.250291 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-ccglg/must-gather-zzwvt"
Dec 05 18:31:13 crc kubenswrapper[4961]: I1205 18:31:13.698328 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-ccglg/must-gather-zzwvt"]
Dec 05 18:31:13 crc kubenswrapper[4961]: I1205 18:31:13.700931 4961 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 18:31:13 crc kubenswrapper[4961]: I1205 18:31:13.780441 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-ccglg/must-gather-zzwvt" event={"ID":"6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c","Type":"ContainerStarted","Data":"95b0b3cf7ff24787e76cfa6ab8a1827456dac2cad2b590fa51a50234eee4da19"}
Dec 05 18:31:17 crc kubenswrapper[4961]: I1205 18:31:17.824240 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-ccglg/must-gather-zzwvt" event={"ID":"6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c","Type":"ContainerStarted","Data":"5cd4510b9488d1a4b6aedea3941c2fe5848abf743f50b927f59a5711c301a8e2"}
Dec 05 18:31:17 crc kubenswrapper[4961]: I1205 18:31:17.824835 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-ccglg/must-gather-zzwvt" event={"ID":"6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c","Type":"ContainerStarted","Data":"0a30b434aa558007cc7989252780acc8bf5d18914bc9739be0c324522a445915"}
Dec 05 18:31:17 crc kubenswrapper[4961]: I1205 18:31:17.854246 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-ccglg/must-gather-zzwvt" podStartSLOduration=2.282828181 podStartE2EDuration="5.854219066s" podCreationTimestamp="2025-12-05 18:31:12 +0000 UTC" firstStartedPulling="2025-12-05 18:31:13.700708123 +0000 UTC m=+3479.761858596" lastFinishedPulling="2025-12-05 18:31:17.272099008 +0000 UTC m=+3483.333249481" observedRunningTime="2025-12-05 18:31:17.840642624 +0000 UTC m=+3483.901793117" watchObservedRunningTime="2025-12-05 18:31:17.854219066 +0000 UTC m=+3483.915369569"
Dec 05 18:31:21 crc kubenswrapper[4961]: I1205 18:31:21.093219 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-ccglg/crc-debug-z7r89"]
Dec 05 18:31:21 crc kubenswrapper[4961]: I1205 18:31:21.094894 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-ccglg/crc-debug-z7r89"
Dec 05 18:31:21 crc kubenswrapper[4961]: I1205 18:31:21.243079 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6e4031d0-2a81-455f-94b9-1fe59b91ec78-host\") pod \"crc-debug-z7r89\" (UID: \"6e4031d0-2a81-455f-94b9-1fe59b91ec78\") " pod="openshift-must-gather-ccglg/crc-debug-z7r89"
Dec 05 18:31:21 crc kubenswrapper[4961]: I1205 18:31:21.243552 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tn5wc\" (UniqueName: \"kubernetes.io/projected/6e4031d0-2a81-455f-94b9-1fe59b91ec78-kube-api-access-tn5wc\") pod \"crc-debug-z7r89\" (UID: \"6e4031d0-2a81-455f-94b9-1fe59b91ec78\") " pod="openshift-must-gather-ccglg/crc-debug-z7r89"
Dec 05 18:31:21 crc kubenswrapper[4961]: I1205 18:31:21.344917 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6e4031d0-2a81-455f-94b9-1fe59b91ec78-host\") pod \"crc-debug-z7r89\" (UID: \"6e4031d0-2a81-455f-94b9-1fe59b91ec78\") " pod="openshift-must-gather-ccglg/crc-debug-z7r89"
Dec 05 18:31:21 crc kubenswrapper[4961]: I1205 18:31:21.344983 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tn5wc\" (UniqueName: \"kubernetes.io/projected/6e4031d0-2a81-455f-94b9-1fe59b91ec78-kube-api-access-tn5wc\") pod \"crc-debug-z7r89\" (UID: \"6e4031d0-2a81-455f-94b9-1fe59b91ec78\") " pod="openshift-must-gather-ccglg/crc-debug-z7r89"
Dec 05 18:31:21 crc kubenswrapper[4961]: I1205 18:31:21.345097 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6e4031d0-2a81-455f-94b9-1fe59b91ec78-host\") pod \"crc-debug-z7r89\" (UID: \"6e4031d0-2a81-455f-94b9-1fe59b91ec78\") " pod="openshift-must-gather-ccglg/crc-debug-z7r89"
Dec 05 18:31:21 crc kubenswrapper[4961]: I1205 18:31:21.368082 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tn5wc\" (UniqueName: \"kubernetes.io/projected/6e4031d0-2a81-455f-94b9-1fe59b91ec78-kube-api-access-tn5wc\") pod \"crc-debug-z7r89\" (UID: \"6e4031d0-2a81-455f-94b9-1fe59b91ec78\") " pod="openshift-must-gather-ccglg/crc-debug-z7r89"
Dec 05 18:31:21 crc kubenswrapper[4961]: I1205 18:31:21.411969 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-ccglg/crc-debug-z7r89"
Dec 05 18:31:21 crc kubenswrapper[4961]: I1205 18:31:21.861846 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-ccglg/crc-debug-z7r89" event={"ID":"6e4031d0-2a81-455f-94b9-1fe59b91ec78","Type":"ContainerStarted","Data":"9afffe1dd3eaebb3cb02a5df932262b062e9d57bd1781bad0ae18bf8a39e2307"}
Dec 05 18:31:32 crc kubenswrapper[4961]: I1205 18:31:32.977188 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-ccglg/crc-debug-z7r89" event={"ID":"6e4031d0-2a81-455f-94b9-1fe59b91ec78","Type":"ContainerStarted","Data":"02caa040780271ac3a7df31143866f5b313b38acdbba195c884fd7dc51d3594c"}
Dec 05 18:31:32 crc kubenswrapper[4961]: I1205 18:31:32.998479 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-ccglg/crc-debug-z7r89" podStartSLOduration=1.510521757 podStartE2EDuration="11.99845981s" podCreationTimestamp="2025-12-05 18:31:21 +0000 UTC" firstStartedPulling="2025-12-05 18:31:21.44807529 +0000 UTC m=+3487.509225763" lastFinishedPulling="2025-12-05 18:31:31.936013343 +0000 UTC m=+3497.997163816" observedRunningTime="2025-12-05 18:31:32.992149186 +0000 UTC m=+3499.053299659" watchObservedRunningTime="2025-12-05 18:31:32.99845981 +0000 UTC m=+3499.059610283"
Dec 05 18:32:12 crc kubenswrapper[4961]: I1205 18:32:12.352554 4961 generic.go:334] "Generic (PLEG): container finished" podID="6e4031d0-2a81-455f-94b9-1fe59b91ec78" containerID="02caa040780271ac3a7df31143866f5b313b38acdbba195c884fd7dc51d3594c" exitCode=0
Dec 05 18:32:12 crc kubenswrapper[4961]: I1205 18:32:12.352689 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-ccglg/crc-debug-z7r89" event={"ID":"6e4031d0-2a81-455f-94b9-1fe59b91ec78","Type":"ContainerDied","Data":"02caa040780271ac3a7df31143866f5b313b38acdbba195c884fd7dc51d3594c"}
Dec 05 18:32:13 crc kubenswrapper[4961]: I1205 18:32:13.497346 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-ccglg/crc-debug-z7r89"
Dec 05 18:32:13 crc kubenswrapper[4961]: I1205 18:32:13.547854 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-ccglg/crc-debug-z7r89"]
Dec 05 18:32:13 crc kubenswrapper[4961]: I1205 18:32:13.550383 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6e4031d0-2a81-455f-94b9-1fe59b91ec78-host\") pod \"6e4031d0-2a81-455f-94b9-1fe59b91ec78\" (UID: \"6e4031d0-2a81-455f-94b9-1fe59b91ec78\") "
Dec 05 18:32:13 crc kubenswrapper[4961]: I1205 18:32:13.550598 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tn5wc\" (UniqueName: \"kubernetes.io/projected/6e4031d0-2a81-455f-94b9-1fe59b91ec78-kube-api-access-tn5wc\") pod \"6e4031d0-2a81-455f-94b9-1fe59b91ec78\" (UID: \"6e4031d0-2a81-455f-94b9-1fe59b91ec78\") "
Dec 05 18:32:13 crc kubenswrapper[4961]: I1205 18:32:13.550945 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/6e4031d0-2a81-455f-94b9-1fe59b91ec78-host" (OuterVolumeSpecName: "host") pod "6e4031d0-2a81-455f-94b9-1fe59b91ec78" (UID: "6e4031d0-2a81-455f-94b9-1fe59b91ec78"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 18:32:13 crc kubenswrapper[4961]: I1205 18:32:13.551255 4961 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/6e4031d0-2a81-455f-94b9-1fe59b91ec78-host\") on node \"crc\" DevicePath \"\""
Dec 05 18:32:13 crc kubenswrapper[4961]: I1205 18:32:13.557002 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-ccglg/crc-debug-z7r89"]
Dec 05 18:32:13 crc kubenswrapper[4961]: I1205 18:32:13.564029 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6e4031d0-2a81-455f-94b9-1fe59b91ec78-kube-api-access-tn5wc" (OuterVolumeSpecName: "kube-api-access-tn5wc") pod "6e4031d0-2a81-455f-94b9-1fe59b91ec78" (UID: "6e4031d0-2a81-455f-94b9-1fe59b91ec78"). InnerVolumeSpecName "kube-api-access-tn5wc". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 18:32:13 crc kubenswrapper[4961]: I1205 18:32:13.653662 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tn5wc\" (UniqueName: \"kubernetes.io/projected/6e4031d0-2a81-455f-94b9-1fe59b91ec78-kube-api-access-tn5wc\") on node \"crc\" DevicePath \"\""
Dec 05 18:32:14 crc kubenswrapper[4961]: I1205 18:32:14.393883 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9afffe1dd3eaebb3cb02a5df932262b062e9d57bd1781bad0ae18bf8a39e2307"
Dec 05 18:32:14 crc kubenswrapper[4961]: I1205 18:32:14.393969 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-ccglg/crc-debug-z7r89"
Dec 05 18:32:14 crc kubenswrapper[4961]: I1205 18:32:14.833093 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-ccglg/crc-debug-qgj67"]
Dec 05 18:32:14 crc kubenswrapper[4961]: E1205 18:32:14.835524 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6e4031d0-2a81-455f-94b9-1fe59b91ec78" containerName="container-00"
Dec 05 18:32:14 crc kubenswrapper[4961]: I1205 18:32:14.835557 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="6e4031d0-2a81-455f-94b9-1fe59b91ec78" containerName="container-00"
Dec 05 18:32:14 crc kubenswrapper[4961]: I1205 18:32:14.836407 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="6e4031d0-2a81-455f-94b9-1fe59b91ec78" containerName="container-00"
Dec 05 18:32:14 crc kubenswrapper[4961]: I1205 18:32:14.840087 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-ccglg/crc-debug-qgj67"
Dec 05 18:32:14 crc kubenswrapper[4961]: I1205 18:32:14.888821 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6e4031d0-2a81-455f-94b9-1fe59b91ec78" path="/var/lib/kubelet/pods/6e4031d0-2a81-455f-94b9-1fe59b91ec78/volumes"
Dec 05 18:32:14 crc kubenswrapper[4961]: I1205 18:32:14.895239 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8480d142-16ef-4d80-95ca-b2a41478384a-host\") pod \"crc-debug-qgj67\" (UID: \"8480d142-16ef-4d80-95ca-b2a41478384a\") " pod="openshift-must-gather-ccglg/crc-debug-qgj67"
Dec 05 18:32:14 crc kubenswrapper[4961]: I1205 18:32:14.895338 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sn947\" (UniqueName: \"kubernetes.io/projected/8480d142-16ef-4d80-95ca-b2a41478384a-kube-api-access-sn947\") pod \"crc-debug-qgj67\" (UID: \"8480d142-16ef-4d80-95ca-b2a41478384a\") " pod="openshift-must-gather-ccglg/crc-debug-qgj67"
Dec 05 18:32:14 crc kubenswrapper[4961]: I1205 18:32:14.997203 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8480d142-16ef-4d80-95ca-b2a41478384a-host\") pod \"crc-debug-qgj67\" (UID: \"8480d142-16ef-4d80-95ca-b2a41478384a\") " pod="openshift-must-gather-ccglg/crc-debug-qgj67"
Dec 05 18:32:14 crc kubenswrapper[4961]: I1205 18:32:14.997326 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sn947\" (UniqueName: \"kubernetes.io/projected/8480d142-16ef-4d80-95ca-b2a41478384a-kube-api-access-sn947\") pod \"crc-debug-qgj67\" (UID: \"8480d142-16ef-4d80-95ca-b2a41478384a\") " pod="openshift-must-gather-ccglg/crc-debug-qgj67"
Dec 05 18:32:14 crc kubenswrapper[4961]: I1205 18:32:14.997358 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8480d142-16ef-4d80-95ca-b2a41478384a-host\") pod \"crc-debug-qgj67\" (UID: \"8480d142-16ef-4d80-95ca-b2a41478384a\") " pod="openshift-must-gather-ccglg/crc-debug-qgj67"
Dec 05 18:32:15 crc kubenswrapper[4961]: I1205 18:32:15.048508 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sn947\" (UniqueName: \"kubernetes.io/projected/8480d142-16ef-4d80-95ca-b2a41478384a-kube-api-access-sn947\") pod \"crc-debug-qgj67\" (UID: \"8480d142-16ef-4d80-95ca-b2a41478384a\") " pod="openshift-must-gather-ccglg/crc-debug-qgj67"
Dec 05 18:32:15 crc kubenswrapper[4961]: I1205 18:32:15.168624 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-ccglg/crc-debug-qgj67"
Dec 05 18:32:15 crc kubenswrapper[4961]: I1205 18:32:15.408866 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-ccglg/crc-debug-qgj67" event={"ID":"8480d142-16ef-4d80-95ca-b2a41478384a","Type":"ContainerStarted","Data":"fb70322c3185435d671d1202300e092eae2f65d568504133d1642339b25c5a6c"}
Dec 05 18:32:16 crc kubenswrapper[4961]: I1205 18:32:16.423912 4961 generic.go:334] "Generic (PLEG): container finished" podID="8480d142-16ef-4d80-95ca-b2a41478384a" containerID="9f9b6989a7365371e805afad607ad815fdd2668f1bf33e5bfb08bb410a94390b" exitCode=0
Dec 05 18:32:16 crc kubenswrapper[4961]: I1205 18:32:16.423962 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-ccglg/crc-debug-qgj67" event={"ID":"8480d142-16ef-4d80-95ca-b2a41478384a","Type":"ContainerDied","Data":"9f9b6989a7365371e805afad607ad815fdd2668f1bf33e5bfb08bb410a94390b"}
Dec 05 18:32:16 crc kubenswrapper[4961]: I1205 18:32:16.933535 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-ccglg/crc-debug-qgj67"]
Dec 05 18:32:16 crc kubenswrapper[4961]: I1205 18:32:16.941531 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-ccglg/crc-debug-qgj67"]
Dec 05 18:32:17 crc kubenswrapper[4961]: I1205 18:32:17.167599 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gpsfv"]
Dec 05 18:32:17 crc kubenswrapper[4961]: E1205 18:32:17.168148 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8480d142-16ef-4d80-95ca-b2a41478384a" containerName="container-00"
Dec 05 18:32:17 crc kubenswrapper[4961]: I1205 18:32:17.168168 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="8480d142-16ef-4d80-95ca-b2a41478384a" containerName="container-00"
Dec 05 18:32:17 crc kubenswrapper[4961]: I1205 18:32:17.168364 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="8480d142-16ef-4d80-95ca-b2a41478384a" containerName="container-00"
Dec 05 18:32:17 crc kubenswrapper[4961]: I1205 18:32:17.170087 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gpsfv"
Dec 05 18:32:17 crc kubenswrapper[4961]: I1205 18:32:17.180193 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gpsfv"]
Dec 05 18:32:17 crc kubenswrapper[4961]: I1205 18:32:17.338093 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mpdqn\" (UniqueName: \"kubernetes.io/projected/3a61f445-54b4-43e8-8810-41e5fcfd2dd0-kube-api-access-mpdqn\") pod \"redhat-marketplace-gpsfv\" (UID: \"3a61f445-54b4-43e8-8810-41e5fcfd2dd0\") " pod="openshift-marketplace/redhat-marketplace-gpsfv"
Dec 05 18:32:17 crc kubenswrapper[4961]: I1205 18:32:17.338331 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a61f445-54b4-43e8-8810-41e5fcfd2dd0-catalog-content\") pod \"redhat-marketplace-gpsfv\" (UID: \"3a61f445-54b4-43e8-8810-41e5fcfd2dd0\") " pod="openshift-marketplace/redhat-marketplace-gpsfv"
Dec 05 18:32:17 crc kubenswrapper[4961]: I1205 18:32:17.338562 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a61f445-54b4-43e8-8810-41e5fcfd2dd0-utilities\") pod \"redhat-marketplace-gpsfv\" (UID: \"3a61f445-54b4-43e8-8810-41e5fcfd2dd0\") " pod="openshift-marketplace/redhat-marketplace-gpsfv"
Dec 05 18:32:17 crc kubenswrapper[4961]: I1205 18:32:17.440133 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mpdqn\" (UniqueName: \"kubernetes.io/projected/3a61f445-54b4-43e8-8810-41e5fcfd2dd0-kube-api-access-mpdqn\") pod \"redhat-marketplace-gpsfv\" (UID: \"3a61f445-54b4-43e8-8810-41e5fcfd2dd0\") " pod="openshift-marketplace/redhat-marketplace-gpsfv"
Dec 05 18:32:17 crc kubenswrapper[4961]: I1205 18:32:17.440188 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a61f445-54b4-43e8-8810-41e5fcfd2dd0-catalog-content\") pod \"redhat-marketplace-gpsfv\" (UID: \"3a61f445-54b4-43e8-8810-41e5fcfd2dd0\") " pod="openshift-marketplace/redhat-marketplace-gpsfv"
Dec 05 18:32:17 crc kubenswrapper[4961]: I1205 18:32:17.440272 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a61f445-54b4-43e8-8810-41e5fcfd2dd0-utilities\") pod \"redhat-marketplace-gpsfv\" (UID: \"3a61f445-54b4-43e8-8810-41e5fcfd2dd0\") " pod="openshift-marketplace/redhat-marketplace-gpsfv"
Dec 05 18:32:17 crc kubenswrapper[4961]: I1205 18:32:17.440716 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a61f445-54b4-43e8-8810-41e5fcfd2dd0-utilities\") pod \"redhat-marketplace-gpsfv\" (UID: \"3a61f445-54b4-43e8-8810-41e5fcfd2dd0\") " pod="openshift-marketplace/redhat-marketplace-gpsfv"
Dec 05 18:32:17 crc kubenswrapper[4961]: I1205 18:32:17.441315 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a61f445-54b4-43e8-8810-41e5fcfd2dd0-catalog-content\") pod \"redhat-marketplace-gpsfv\" (UID: \"3a61f445-54b4-43e8-8810-41e5fcfd2dd0\") " pod="openshift-marketplace/redhat-marketplace-gpsfv"
Dec 05 18:32:17 crc kubenswrapper[4961]: I1205 18:32:17.467029 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mpdqn\" (UniqueName: \"kubernetes.io/projected/3a61f445-54b4-43e8-8810-41e5fcfd2dd0-kube-api-access-mpdqn\") pod \"redhat-marketplace-gpsfv\" (UID: \"3a61f445-54b4-43e8-8810-41e5fcfd2dd0\") " pod="openshift-marketplace/redhat-marketplace-gpsfv"
Dec 05 18:32:17 crc kubenswrapper[4961]: I1205 18:32:17.497836 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gpsfv"
Dec 05 18:32:17 crc kubenswrapper[4961]: I1205 18:32:17.626377 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-ccglg/crc-debug-qgj67"
Dec 05 18:32:17 crc kubenswrapper[4961]: I1205 18:32:17.746261 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sn947\" (UniqueName: \"kubernetes.io/projected/8480d142-16ef-4d80-95ca-b2a41478384a-kube-api-access-sn947\") pod \"8480d142-16ef-4d80-95ca-b2a41478384a\" (UID: \"8480d142-16ef-4d80-95ca-b2a41478384a\") "
Dec 05 18:32:17 crc kubenswrapper[4961]: I1205 18:32:17.746446 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8480d142-16ef-4d80-95ca-b2a41478384a-host\") pod \"8480d142-16ef-4d80-95ca-b2a41478384a\" (UID: \"8480d142-16ef-4d80-95ca-b2a41478384a\") "
Dec 05 18:32:17 crc kubenswrapper[4961]: I1205 18:32:17.746914 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8480d142-16ef-4d80-95ca-b2a41478384a-host" (OuterVolumeSpecName: "host") pod "8480d142-16ef-4d80-95ca-b2a41478384a" (UID: "8480d142-16ef-4d80-95ca-b2a41478384a"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 18:32:17 crc kubenswrapper[4961]: I1205 18:32:17.755065 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8480d142-16ef-4d80-95ca-b2a41478384a-kube-api-access-sn947" (OuterVolumeSpecName: "kube-api-access-sn947") pod "8480d142-16ef-4d80-95ca-b2a41478384a" (UID: "8480d142-16ef-4d80-95ca-b2a41478384a"). InnerVolumeSpecName "kube-api-access-sn947". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 18:32:17 crc kubenswrapper[4961]: I1205 18:32:17.785938 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gpsfv"]
Dec 05 18:32:17 crc kubenswrapper[4961]: I1205 18:32:17.849148 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sn947\" (UniqueName: \"kubernetes.io/projected/8480d142-16ef-4d80-95ca-b2a41478384a-kube-api-access-sn947\") on node \"crc\" DevicePath \"\""
Dec 05 18:32:17 crc kubenswrapper[4961]: I1205 18:32:17.849176 4961 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8480d142-16ef-4d80-95ca-b2a41478384a-host\") on node \"crc\" DevicePath \"\""
Dec 05 18:32:18 crc kubenswrapper[4961]: I1205 18:32:18.115929 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-ccglg/crc-debug-lj2xt"]
Dec 05 18:32:18 crc kubenswrapper[4961]: I1205 18:32:18.117178 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-ccglg/crc-debug-lj2xt"
Dec 05 18:32:18 crc kubenswrapper[4961]: I1205 18:32:18.258029 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bq9d6\" (UniqueName: \"kubernetes.io/projected/e4dfb722-6f7e-48bc-bfc9-36150192e418-kube-api-access-bq9d6\") pod \"crc-debug-lj2xt\" (UID: \"e4dfb722-6f7e-48bc-bfc9-36150192e418\") " pod="openshift-must-gather-ccglg/crc-debug-lj2xt"
Dec 05 18:32:18 crc kubenswrapper[4961]: I1205 18:32:18.258618 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e4dfb722-6f7e-48bc-bfc9-36150192e418-host\") pod \"crc-debug-lj2xt\" (UID: \"e4dfb722-6f7e-48bc-bfc9-36150192e418\") " pod="openshift-must-gather-ccglg/crc-debug-lj2xt"
Dec 05 18:32:18 crc kubenswrapper[4961]: I1205 18:32:18.360961 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e4dfb722-6f7e-48bc-bfc9-36150192e418-host\") pod \"crc-debug-lj2xt\" (UID: \"e4dfb722-6f7e-48bc-bfc9-36150192e418\") " pod="openshift-must-gather-ccglg/crc-debug-lj2xt"
Dec 05 18:32:18 crc kubenswrapper[4961]: I1205 18:32:18.361139 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e4dfb722-6f7e-48bc-bfc9-36150192e418-host\") pod \"crc-debug-lj2xt\" (UID: \"e4dfb722-6f7e-48bc-bfc9-36150192e418\") " pod="openshift-must-gather-ccglg/crc-debug-lj2xt"
Dec 05 18:32:18 crc kubenswrapper[4961]: I1205 18:32:18.361536 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bq9d6\" (UniqueName: \"kubernetes.io/projected/e4dfb722-6f7e-48bc-bfc9-36150192e418-kube-api-access-bq9d6\") pod \"crc-debug-lj2xt\" (UID: \"e4dfb722-6f7e-48bc-bfc9-36150192e418\") " pod="openshift-must-gather-ccglg/crc-debug-lj2xt"
Dec 05 18:32:18 crc kubenswrapper[4961]: I1205 18:32:18.381571 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bq9d6\" (UniqueName: \"kubernetes.io/projected/e4dfb722-6f7e-48bc-bfc9-36150192e418-kube-api-access-bq9d6\") pod \"crc-debug-lj2xt\" (UID: \"e4dfb722-6f7e-48bc-bfc9-36150192e418\") " pod="openshift-must-gather-ccglg/crc-debug-lj2xt"
Dec 05 18:32:18 crc kubenswrapper[4961]: I1205 18:32:18.434001 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-ccglg/crc-debug-lj2xt"
Dec 05 18:32:18 crc kubenswrapper[4961]: I1205 18:32:18.446831 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fb70322c3185435d671d1202300e092eae2f65d568504133d1642339b25c5a6c"
Dec 05 18:32:18 crc kubenswrapper[4961]: I1205 18:32:18.448027 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-ccglg/crc-debug-qgj67"
Dec 05 18:32:18 crc kubenswrapper[4961]: I1205 18:32:18.462525 4961 generic.go:334] "Generic (PLEG): container finished" podID="3a61f445-54b4-43e8-8810-41e5fcfd2dd0" containerID="fe10b939f06fd534eac8dc1f5bbd800acf4de96e113f2804bc7199669bf65abd" exitCode=0
Dec 05 18:32:18 crc kubenswrapper[4961]: W1205 18:32:18.462547 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode4dfb722_6f7e_48bc_bfc9_36150192e418.slice/crio-aa0cc39363aeb2d2f80593b9a979cd084ed0072dce37f282af94ac4eda376f8e WatchSource:0}: Error finding container aa0cc39363aeb2d2f80593b9a979cd084ed0072dce37f282af94ac4eda376f8e: Status 404 returned error can't find the container with id aa0cc39363aeb2d2f80593b9a979cd084ed0072dce37f282af94ac4eda376f8e
Dec 05 18:32:18 crc kubenswrapper[4961]: I1205 18:32:18.462579 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gpsfv" event={"ID":"3a61f445-54b4-43e8-8810-41e5fcfd2dd0","Type":"ContainerDied","Data":"fe10b939f06fd534eac8dc1f5bbd800acf4de96e113f2804bc7199669bf65abd"}
Dec 05 18:32:18 crc kubenswrapper[4961]: I1205 18:32:18.462609 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gpsfv" event={"ID":"3a61f445-54b4-43e8-8810-41e5fcfd2dd0","Type":"ContainerStarted","Data":"239ba383caf76ad13c81b1782cfb0f79edb76a3803e60d8ce9fd83236d6e8e15"}
Dec 05 18:32:18 crc kubenswrapper[4961]: I1205 18:32:18.875288 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8480d142-16ef-4d80-95ca-b2a41478384a" path="/var/lib/kubelet/pods/8480d142-16ef-4d80-95ca-b2a41478384a/volumes"
Dec 05 18:32:19 crc kubenswrapper[4961]: I1205 18:32:19.478706 4961 generic.go:334] "Generic (PLEG): container finished" podID="3a61f445-54b4-43e8-8810-41e5fcfd2dd0" containerID="c05d5f244d555481075f25c0a30b6d3d49caa8f18a20a899ba0a553e63935df4" exitCode=0
Dec 05 18:32:19 crc kubenswrapper[4961]: I1205 18:32:19.478803 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gpsfv" event={"ID":"3a61f445-54b4-43e8-8810-41e5fcfd2dd0","Type":"ContainerDied","Data":"c05d5f244d555481075f25c0a30b6d3d49caa8f18a20a899ba0a553e63935df4"}
Dec 05 18:32:19 crc kubenswrapper[4961]: I1205 18:32:19.483288 4961 generic.go:334] "Generic (PLEG): container finished" podID="e4dfb722-6f7e-48bc-bfc9-36150192e418" containerID="8f0c2f7d90b996484834909508bc98b73373428d040df2f86cd328af218d4ac1" exitCode=0
Dec 05 18:32:19 crc kubenswrapper[4961]: I1205 18:32:19.483349 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-ccglg/crc-debug-lj2xt" event={"ID":"e4dfb722-6f7e-48bc-bfc9-36150192e418","Type":"ContainerDied","Data":"8f0c2f7d90b996484834909508bc98b73373428d040df2f86cd328af218d4ac1"}
Dec 05 18:32:19 crc kubenswrapper[4961]: I1205 18:32:19.483400 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-ccglg/crc-debug-lj2xt" event={"ID":"e4dfb722-6f7e-48bc-bfc9-36150192e418","Type":"ContainerStarted","Data":"aa0cc39363aeb2d2f80593b9a979cd084ed0072dce37f282af94ac4eda376f8e"}
Dec 05 18:32:19 crc kubenswrapper[4961]: I1205 18:32:19.560947 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-ccglg/crc-debug-lj2xt"]
Dec 05 18:32:19 crc kubenswrapper[4961]: I1205 18:32:19.570575 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-ccglg/crc-debug-lj2xt"]
Dec 05 18:32:20 crc kubenswrapper[4961]: I1205 18:32:20.495317 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gpsfv" event={"ID":"3a61f445-54b4-43e8-8810-41e5fcfd2dd0","Type":"ContainerStarted","Data":"51cb2b407804c2d77dd5daf06010f564553d0a30ae1cb4c6a980c3312e3746a3"}
Dec 05 18:32:20 crc kubenswrapper[4961]: I1205 18:32:20.522331 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gpsfv" podStartSLOduration=2.121305005 podStartE2EDuration="3.522307736s" podCreationTimestamp="2025-12-05 18:32:17 +0000 UTC" firstStartedPulling="2025-12-05 18:32:18.464650746 +0000 UTC m=+3544.525801219" lastFinishedPulling="2025-12-05 18:32:19.865653467 +0000 UTC m=+3545.926803950" observedRunningTime="2025-12-05 18:32:20.514617389 +0000 UTC m=+3546.575767872" watchObservedRunningTime="2025-12-05 18:32:20.522307736 +0000 UTC m=+3546.583458209"
Dec 05 18:32:20 crc kubenswrapper[4961]: I1205 18:32:20.626632 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-ccglg/crc-debug-lj2xt"
Dec 05 18:32:20 crc kubenswrapper[4961]: I1205 18:32:20.702257 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bq9d6\" (UniqueName: \"kubernetes.io/projected/e4dfb722-6f7e-48bc-bfc9-36150192e418-kube-api-access-bq9d6\") pod \"e4dfb722-6f7e-48bc-bfc9-36150192e418\" (UID: \"e4dfb722-6f7e-48bc-bfc9-36150192e418\") "
Dec 05 18:32:20 crc kubenswrapper[4961]: I1205 18:32:20.702324 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e4dfb722-6f7e-48bc-bfc9-36150192e418-host\") pod \"e4dfb722-6f7e-48bc-bfc9-36150192e418\" (UID: \"e4dfb722-6f7e-48bc-bfc9-36150192e418\") "
Dec 05 18:32:20 crc kubenswrapper[4961]: I1205 18:32:20.702421 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/e4dfb722-6f7e-48bc-bfc9-36150192e418-host" (OuterVolumeSpecName: "host") pod "e4dfb722-6f7e-48bc-bfc9-36150192e418" (UID: "e4dfb722-6f7e-48bc-bfc9-36150192e418"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Dec 05 18:32:20 crc kubenswrapper[4961]: I1205 18:32:20.702811 4961 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/e4dfb722-6f7e-48bc-bfc9-36150192e418-host\") on node \"crc\" DevicePath \"\""
Dec 05 18:32:20 crc kubenswrapper[4961]: I1205 18:32:20.710090 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e4dfb722-6f7e-48bc-bfc9-36150192e418-kube-api-access-bq9d6" (OuterVolumeSpecName: "kube-api-access-bq9d6") pod "e4dfb722-6f7e-48bc-bfc9-36150192e418" (UID: "e4dfb722-6f7e-48bc-bfc9-36150192e418"). InnerVolumeSpecName "kube-api-access-bq9d6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:32:20 crc kubenswrapper[4961]: I1205 18:32:20.804546 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bq9d6\" (UniqueName: \"kubernetes.io/projected/e4dfb722-6f7e-48bc-bfc9-36150192e418-kube-api-access-bq9d6\") on node \"crc\" DevicePath \"\"" Dec 05 18:32:20 crc kubenswrapper[4961]: I1205 18:32:20.873838 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e4dfb722-6f7e-48bc-bfc9-36150192e418" path="/var/lib/kubelet/pods/e4dfb722-6f7e-48bc-bfc9-36150192e418/volumes" Dec 05 18:32:21 crc kubenswrapper[4961]: I1205 18:32:21.506334 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-ccglg/crc-debug-lj2xt" Dec 05 18:32:21 crc kubenswrapper[4961]: I1205 18:32:21.506338 4961 scope.go:117] "RemoveContainer" containerID="8f0c2f7d90b996484834909508bc98b73373428d040df2f86cd328af218d4ac1" Dec 05 18:32:27 crc kubenswrapper[4961]: I1205 18:32:27.498052 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gpsfv" Dec 05 18:32:27 crc kubenswrapper[4961]: I1205 18:32:27.498633 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gpsfv" Dec 05 18:32:27 crc kubenswrapper[4961]: I1205 18:32:27.552204 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gpsfv" Dec 05 18:32:27 crc kubenswrapper[4961]: I1205 18:32:27.615241 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gpsfv" Dec 05 18:32:27 crc kubenswrapper[4961]: I1205 18:32:27.792794 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gpsfv"] Dec 05 18:32:29 crc kubenswrapper[4961]: I1205 18:32:29.573705 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gpsfv" podUID="3a61f445-54b4-43e8-8810-41e5fcfd2dd0" containerName="registry-server" containerID="cri-o://51cb2b407804c2d77dd5daf06010f564553d0a30ae1cb4c6a980c3312e3746a3" gracePeriod=2 Dec 05 18:32:30 crc kubenswrapper[4961]: I1205 18:32:30.057943 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gpsfv" Dec 05 18:32:30 crc kubenswrapper[4961]: I1205 18:32:30.192886 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mpdqn\" (UniqueName: \"kubernetes.io/projected/3a61f445-54b4-43e8-8810-41e5fcfd2dd0-kube-api-access-mpdqn\") pod \"3a61f445-54b4-43e8-8810-41e5fcfd2dd0\" (UID: \"3a61f445-54b4-43e8-8810-41e5fcfd2dd0\") " Dec 05 18:32:30 crc kubenswrapper[4961]: I1205 18:32:30.192988 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a61f445-54b4-43e8-8810-41e5fcfd2dd0-catalog-content\") pod \"3a61f445-54b4-43e8-8810-41e5fcfd2dd0\" (UID: \"3a61f445-54b4-43e8-8810-41e5fcfd2dd0\") " Dec 05 18:32:30 crc kubenswrapper[4961]: I1205 18:32:30.194016 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a61f445-54b4-43e8-8810-41e5fcfd2dd0-utilities" (OuterVolumeSpecName: "utilities") pod "3a61f445-54b4-43e8-8810-41e5fcfd2dd0" (UID: "3a61f445-54b4-43e8-8810-41e5fcfd2dd0"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:32:30 crc kubenswrapper[4961]: I1205 18:32:30.193017 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a61f445-54b4-43e8-8810-41e5fcfd2dd0-utilities\") pod \"3a61f445-54b4-43e8-8810-41e5fcfd2dd0\" (UID: \"3a61f445-54b4-43e8-8810-41e5fcfd2dd0\") " Dec 05 18:32:30 crc kubenswrapper[4961]: I1205 18:32:30.195069 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3a61f445-54b4-43e8-8810-41e5fcfd2dd0-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 18:32:30 crc kubenswrapper[4961]: I1205 18:32:30.199093 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3a61f445-54b4-43e8-8810-41e5fcfd2dd0-kube-api-access-mpdqn" (OuterVolumeSpecName: "kube-api-access-mpdqn") pod "3a61f445-54b4-43e8-8810-41e5fcfd2dd0" (UID: "3a61f445-54b4-43e8-8810-41e5fcfd2dd0"). InnerVolumeSpecName "kube-api-access-mpdqn". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:32:30 crc kubenswrapper[4961]: I1205 18:32:30.211767 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3a61f445-54b4-43e8-8810-41e5fcfd2dd0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3a61f445-54b4-43e8-8810-41e5fcfd2dd0" (UID: "3a61f445-54b4-43e8-8810-41e5fcfd2dd0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:32:30 crc kubenswrapper[4961]: I1205 18:32:30.297689 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mpdqn\" (UniqueName: \"kubernetes.io/projected/3a61f445-54b4-43e8-8810-41e5fcfd2dd0-kube-api-access-mpdqn\") on node \"crc\" DevicePath \"\"" Dec 05 18:32:30 crc kubenswrapper[4961]: I1205 18:32:30.297729 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3a61f445-54b4-43e8-8810-41e5fcfd2dd0-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 18:32:30 crc kubenswrapper[4961]: I1205 18:32:30.589070 4961 generic.go:334] "Generic (PLEG): container finished" podID="3a61f445-54b4-43e8-8810-41e5fcfd2dd0" containerID="51cb2b407804c2d77dd5daf06010f564553d0a30ae1cb4c6a980c3312e3746a3" exitCode=0 Dec 05 18:32:30 crc kubenswrapper[4961]: I1205 18:32:30.589160 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gpsfv" event={"ID":"3a61f445-54b4-43e8-8810-41e5fcfd2dd0","Type":"ContainerDied","Data":"51cb2b407804c2d77dd5daf06010f564553d0a30ae1cb4c6a980c3312e3746a3"} Dec 05 18:32:30 crc kubenswrapper[4961]: I1205 18:32:30.589232 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gpsfv" event={"ID":"3a61f445-54b4-43e8-8810-41e5fcfd2dd0","Type":"ContainerDied","Data":"239ba383caf76ad13c81b1782cfb0f79edb76a3803e60d8ce9fd83236d6e8e15"} Dec 05 18:32:30 crc kubenswrapper[4961]: I1205 18:32:30.589264 4961 scope.go:117] "RemoveContainer" containerID="51cb2b407804c2d77dd5daf06010f564553d0a30ae1cb4c6a980c3312e3746a3" Dec 05 18:32:30 crc kubenswrapper[4961]: I1205 18:32:30.589550 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gpsfv" Dec 05 18:32:30 crc kubenswrapper[4961]: I1205 18:32:30.641441 4961 scope.go:117] "RemoveContainer" containerID="c05d5f244d555481075f25c0a30b6d3d49caa8f18a20a899ba0a553e63935df4" Dec 05 18:32:30 crc kubenswrapper[4961]: I1205 18:32:30.654501 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gpsfv"] Dec 05 18:32:30 crc kubenswrapper[4961]: I1205 18:32:30.667374 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-gpsfv"] Dec 05 18:32:30 crc kubenswrapper[4961]: I1205 18:32:30.670338 4961 scope.go:117] "RemoveContainer" containerID="fe10b939f06fd534eac8dc1f5bbd800acf4de96e113f2804bc7199669bf65abd" Dec 05 18:32:30 crc kubenswrapper[4961]: I1205 18:32:30.729151 4961 scope.go:117] "RemoveContainer" containerID="51cb2b407804c2d77dd5daf06010f564553d0a30ae1cb4c6a980c3312e3746a3" Dec 05 18:32:30 crc kubenswrapper[4961]: E1205 18:32:30.729953 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51cb2b407804c2d77dd5daf06010f564553d0a30ae1cb4c6a980c3312e3746a3\": container with ID starting with 51cb2b407804c2d77dd5daf06010f564553d0a30ae1cb4c6a980c3312e3746a3 not found: ID does not exist" containerID="51cb2b407804c2d77dd5daf06010f564553d0a30ae1cb4c6a980c3312e3746a3" Dec 05 18:32:30 crc kubenswrapper[4961]: I1205 18:32:30.729998 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51cb2b407804c2d77dd5daf06010f564553d0a30ae1cb4c6a980c3312e3746a3"} err="failed to get container status \"51cb2b407804c2d77dd5daf06010f564553d0a30ae1cb4c6a980c3312e3746a3\": rpc error: code = NotFound desc = could not find container \"51cb2b407804c2d77dd5daf06010f564553d0a30ae1cb4c6a980c3312e3746a3\": container with ID starting with 51cb2b407804c2d77dd5daf06010f564553d0a30ae1cb4c6a980c3312e3746a3 not found: ID does not exist" Dec 05 18:32:30 crc kubenswrapper[4961]: I1205 18:32:30.730028 4961 scope.go:117] "RemoveContainer" containerID="c05d5f244d555481075f25c0a30b6d3d49caa8f18a20a899ba0a553e63935df4" Dec 05 18:32:30 crc kubenswrapper[4961]: E1205 18:32:30.730489 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c05d5f244d555481075f25c0a30b6d3d49caa8f18a20a899ba0a553e63935df4\": container with ID starting with c05d5f244d555481075f25c0a30b6d3d49caa8f18a20a899ba0a553e63935df4 not found: ID does not exist" containerID="c05d5f244d555481075f25c0a30b6d3d49caa8f18a20a899ba0a553e63935df4" Dec 05 18:32:30 crc kubenswrapper[4961]: I1205 18:32:30.730534 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c05d5f244d555481075f25c0a30b6d3d49caa8f18a20a899ba0a553e63935df4"} err="failed to get container status \"c05d5f244d555481075f25c0a30b6d3d49caa8f18a20a899ba0a553e63935df4\": rpc error: code = NotFound desc = could not find container \"c05d5f244d555481075f25c0a30b6d3d49caa8f18a20a899ba0a553e63935df4\": container with ID starting with c05d5f244d555481075f25c0a30b6d3d49caa8f18a20a899ba0a553e63935df4 not found: ID does not exist" Dec 05 18:32:30 crc kubenswrapper[4961]: I1205 18:32:30.730562 4961 scope.go:117] "RemoveContainer" containerID="fe10b939f06fd534eac8dc1f5bbd800acf4de96e113f2804bc7199669bf65abd" Dec 05 18:32:30 crc kubenswrapper[4961]: E1205 18:32:30.730981 4961 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"fe10b939f06fd534eac8dc1f5bbd800acf4de96e113f2804bc7199669bf65abd\": container with ID starting with fe10b939f06fd534eac8dc1f5bbd800acf4de96e113f2804bc7199669bf65abd not found: ID does not exist" containerID="fe10b939f06fd534eac8dc1f5bbd800acf4de96e113f2804bc7199669bf65abd" Dec 05 18:32:30 crc kubenswrapper[4961]: I1205 18:32:30.731008 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fe10b939f06fd534eac8dc1f5bbd800acf4de96e113f2804bc7199669bf65abd"} err="failed to get container status \"fe10b939f06fd534eac8dc1f5bbd800acf4de96e113f2804bc7199669bf65abd\": rpc error: code = NotFound desc = could not find container \"fe10b939f06fd534eac8dc1f5bbd800acf4de96e113f2804bc7199669bf65abd\": container with ID starting with fe10b939f06fd534eac8dc1f5bbd800acf4de96e113f2804bc7199669bf65abd not found: ID does not exist" Dec 05 18:32:30 crc kubenswrapper[4961]: I1205 18:32:30.877875 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3a61f445-54b4-43e8-8810-41e5fcfd2dd0" path="/var/lib/kubelet/pods/3a61f445-54b4-43e8-8810-41e5fcfd2dd0/volumes" Dec 05 18:32:35 crc kubenswrapper[4961]: I1205 18:32:35.662932 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5d86bc7b8-5vlfw_c8daeb2b-9caa-4a76-b22a-e3320f0235a0/barbican-api/0.log" Dec 05 18:32:35 crc kubenswrapper[4961]: I1205 18:32:35.769005 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5d86bc7b8-5vlfw_c8daeb2b-9caa-4a76-b22a-e3320f0235a0/barbican-api-log/0.log" Dec 05 18:32:35 crc kubenswrapper[4961]: I1205 18:32:35.862443 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7c7dc5bc58-54zmv_9db816ec-0b58-40b0-a063-974df541802b/barbican-keystone-listener/0.log" Dec 05 18:32:35 crc kubenswrapper[4961]: I1205 18:32:35.899156 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7c7dc5bc58-54zmv_9db816ec-0b58-40b0-a063-974df541802b/barbican-keystone-listener-log/0.log" Dec 05 18:32:36 crc kubenswrapper[4961]: I1205 18:32:36.051408 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-56bf9fd9dc-6zsc7_1b291fd5-a251-45db-8c2b-334f43909f1f/barbican-worker/0.log" Dec 05 18:32:36 crc kubenswrapper[4961]: I1205 18:32:36.086459 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-56bf9fd9dc-6zsc7_1b291fd5-a251-45db-8c2b-334f43909f1f/barbican-worker-log/0.log" Dec 05 18:32:36 crc kubenswrapper[4961]: I1205 18:32:36.231492 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-84hml_b9050523-5c05-47cd-9e51-85703488427f/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:32:36 crc kubenswrapper[4961]: I1205 18:32:36.282474 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_a61be49f-b67b-4cd9-8790-12fe7dfde50b/ceilometer-central-agent/0.log" Dec 05 18:32:36 crc kubenswrapper[4961]: I1205 18:32:36.369952 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_a61be49f-b67b-4cd9-8790-12fe7dfde50b/ceilometer-notification-agent/0.log" Dec 05 18:32:36 crc kubenswrapper[4961]: I1205 18:32:36.534717 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_a61be49f-b67b-4cd9-8790-12fe7dfde50b/proxy-httpd/0.log" Dec 05 
18:32:36 crc kubenswrapper[4961]: I1205 18:32:36.601851 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_a61be49f-b67b-4cd9-8790-12fe7dfde50b/sg-core/0.log" Dec 05 18:32:36 crc kubenswrapper[4961]: I1205 18:32:36.704493 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_7aa62ef2-e824-4f99-98bc-d4049b51ab7e/cinder-api/0.log" Dec 05 18:32:36 crc kubenswrapper[4961]: I1205 18:32:36.773502 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_7aa62ef2-e824-4f99-98bc-d4049b51ab7e/cinder-api-log/0.log" Dec 05 18:32:36 crc kubenswrapper[4961]: I1205 18:32:36.925719 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_9188090c-6109-45b2-b63c-1656ebb2ad0e/probe/0.log" Dec 05 18:32:36 crc kubenswrapper[4961]: I1205 18:32:36.950821 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_9188090c-6109-45b2-b63c-1656ebb2ad0e/cinder-scheduler/0.log" Dec 05 18:32:37 crc kubenswrapper[4961]: I1205 18:32:37.147344 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2_8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:32:37 crc kubenswrapper[4961]: I1205 18:32:37.157523 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-592hz_877c43bb-852d-4f38-8322-8c72200ca936/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:32:37 crc kubenswrapper[4961]: I1205 18:32:37.339983 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-8c6f6df99-rw7cz_1270f427-e53f-410f-b9ae-9cf12c5dffe1/init/0.log" Dec 05 18:32:37 crc kubenswrapper[4961]: I1205 18:32:37.514272 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-8c6f6df99-rw7cz_1270f427-e53f-410f-b9ae-9cf12c5dffe1/init/0.log" Dec 05 18:32:37 crc kubenswrapper[4961]: I1205 18:32:37.527661 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-8c6f6df99-rw7cz_1270f427-e53f-410f-b9ae-9cf12c5dffe1/dnsmasq-dns/0.log" Dec 05 18:32:37 crc kubenswrapper[4961]: I1205 18:32:37.571135 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-7trn8_c2f474d4-f96a-45cd-9432-b90f703a6b81/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:32:37 crc kubenswrapper[4961]: I1205 18:32:37.730950 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96/glance-httpd/0.log" Dec 05 18:32:37 crc kubenswrapper[4961]: I1205 18:32:37.771882 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96/glance-log/0.log" Dec 05 18:32:37 crc kubenswrapper[4961]: I1205 18:32:37.918702 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_3ac5ebd0-a089-428c-a698-cbd1f6c50c57/glance-log/0.log" Dec 05 18:32:37 crc kubenswrapper[4961]: I1205 18:32:37.926605 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_3ac5ebd0-a089-428c-a698-cbd1f6c50c57/glance-httpd/0.log" Dec 05 18:32:38 crc kubenswrapper[4961]: I1205 18:32:38.053902 4961 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_horizon-78bb69647d-95ptt_a3dcddde-25f9-446a-8d5f-d9468cfa6940/horizon/0.log" Dec 05 18:32:38 crc kubenswrapper[4961]: I1205 18:32:38.242813 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-925jr_b8b70414-5c28-428e-90be-4e5d82070919/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:32:38 crc kubenswrapper[4961]: I1205 18:32:38.343091 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-7lbk7_0e516b37-5d75-47c4-af9c-438a41abf158/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:32:38 crc kubenswrapper[4961]: I1205 18:32:38.419133 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-78bb69647d-95ptt_a3dcddde-25f9-446a-8d5f-d9468cfa6940/horizon-log/0.log" Dec 05 18:32:38 crc kubenswrapper[4961]: I1205 18:32:38.682787 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-7896fbd4bd-l9rg6_3e200718-485e-49b6-b4ab-8311a9178f66/keystone-api/0.log" Dec 05 18:32:38 crc kubenswrapper[4961]: I1205 18:32:38.738207 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29415961-66rdg_1f248e91-833b-4136-ad16-f32f9aff9513/keystone-cron/0.log" Dec 05 18:32:38 crc kubenswrapper[4961]: I1205 18:32:38.840271 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_e5e6ba28-82ee-411a-a9e1-46db404bdff6/kube-state-metrics/0.log" Dec 05 18:32:38 crc kubenswrapper[4961]: I1205 18:32:38.903505 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb_350a4c11-1d87-4f63-8ec8-c808de6e46b0/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:32:39 crc kubenswrapper[4961]: I1205 18:32:39.302064 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-88c564b55-ktjt8_84b8d919-505e-44ba-b19a-532ec4df3533/neutron-api/0.log" Dec 05 18:32:39 crc kubenswrapper[4961]: I1205 18:32:39.433501 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-88c564b55-ktjt8_84b8d919-505e-44ba-b19a-532ec4df3533/neutron-httpd/0.log" Dec 05 18:32:39 crc kubenswrapper[4961]: I1205 18:32:39.523836 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk_baa4c345-2f59-42ac-a33e-c350c642a73c/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:32:39 crc kubenswrapper[4961]: I1205 18:32:39.978136 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_258d7583-7072-4621-8490-c0bfcc91abff/nova-api-log/0.log" Dec 05 18:32:40 crc kubenswrapper[4961]: I1205 18:32:40.005542 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_b8e29a1c-419b-4ab0-84ca-b87652bf1812/nova-cell0-conductor-conductor/0.log" Dec 05 18:32:40 crc kubenswrapper[4961]: I1205 18:32:40.195242 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_258d7583-7072-4621-8490-c0bfcc91abff/nova-api-api/0.log" Dec 05 18:32:40 crc kubenswrapper[4961]: I1205 18:32:40.265326 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_1e14fb01-680d-425f-a35b-c6346f47b86d/nova-cell1-conductor-conductor/0.log" Dec 05 18:32:40 crc kubenswrapper[4961]: I1205 18:32:40.391581 4961 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_nova-cell1-novncproxy-0_2de9d115-6198-4316-a304-1e4eca7cdd98/nova-cell1-novncproxy-novncproxy/0.log" Dec 05 18:32:40 crc kubenswrapper[4961]: I1205 18:32:40.491908 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-crdt6_19f9b2f9-7ecf-4676-bd98-c3c4615d12c9/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:32:40 crc kubenswrapper[4961]: I1205 18:32:40.652798 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_56cc0cd5-a044-49c6-946c-82e56b2c4d57/nova-metadata-log/0.log" Dec 05 18:32:40 crc kubenswrapper[4961]: I1205 18:32:40.906847 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_839ae7fd-5c5c-4767-b0d8-c7f24f17b03b/nova-scheduler-scheduler/0.log" Dec 05 18:32:40 crc kubenswrapper[4961]: I1205 18:32:40.928852 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_786a3535-1c16-4389-9239-49f6d349c3af/mysql-bootstrap/0.log" Dec 05 18:32:41 crc kubenswrapper[4961]: I1205 18:32:41.122824 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_786a3535-1c16-4389-9239-49f6d349c3af/mysql-bootstrap/0.log" Dec 05 18:32:41 crc kubenswrapper[4961]: I1205 18:32:41.135118 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_786a3535-1c16-4389-9239-49f6d349c3af/galera/0.log" Dec 05 18:32:41 crc kubenswrapper[4961]: I1205 18:32:41.325265 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_78297e26-2e01-4bb1-8f35-c96861dfda09/mysql-bootstrap/0.log" Dec 05 18:32:41 crc kubenswrapper[4961]: I1205 18:32:41.495007 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_78297e26-2e01-4bb1-8f35-c96861dfda09/mysql-bootstrap/0.log" Dec 05 18:32:41 crc kubenswrapper[4961]: I1205 18:32:41.518671 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_78297e26-2e01-4bb1-8f35-c96861dfda09/galera/0.log" Dec 05 18:32:41 crc kubenswrapper[4961]: I1205 18:32:41.723577 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_ce8ed9d8-89a0-4d15-9f08-d30111d16a2e/openstackclient/0.log" Dec 05 18:32:41 crc kubenswrapper[4961]: I1205 18:32:41.773761 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_56cc0cd5-a044-49c6-946c-82e56b2c4d57/nova-metadata-metadata/0.log" Dec 05 18:32:41 crc kubenswrapper[4961]: I1205 18:32:41.806768 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ldph7_6b6b1e99-e081-4c93-8fe8-c693eb7a0205/ovn-controller/0.log" Dec 05 18:32:41 crc kubenswrapper[4961]: I1205 18:32:41.997137 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-bkcrz_3854940d-06c0-4afd-a62f-eeeff97e5b7f/openstack-network-exporter/0.log" Dec 05 18:32:42 crc kubenswrapper[4961]: I1205 18:32:42.101054 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-c9ff2_db7a99f8-4e0e-408b-9b96-39340c35d4d8/ovsdb-server-init/0.log" Dec 05 18:32:42 crc kubenswrapper[4961]: I1205 18:32:42.235451 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-c9ff2_db7a99f8-4e0e-408b-9b96-39340c35d4d8/ovsdb-server-init/0.log" Dec 05 18:32:42 crc kubenswrapper[4961]: I1205 18:32:42.273290 4961 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-c9ff2_db7a99f8-4e0e-408b-9b96-39340c35d4d8/ovs-vswitchd/0.log" Dec 05 18:32:42 crc kubenswrapper[4961]: I1205 18:32:42.326405 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-c9ff2_db7a99f8-4e0e-408b-9b96-39340c35d4d8/ovsdb-server/0.log" Dec 05 18:32:42 crc kubenswrapper[4961]: I1205 18:32:42.494596 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-nrkgn_2428c0ac-375e-4515-9dfd-39f68c6e6ed1/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:32:42 crc kubenswrapper[4961]: I1205 18:32:42.545498 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_419fa856-384a-4fd1-95e7-7810e12b1307/openstack-network-exporter/0.log" Dec 05 18:32:42 crc kubenswrapper[4961]: I1205 18:32:42.563216 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_419fa856-384a-4fd1-95e7-7810e12b1307/ovn-northd/0.log" Dec 05 18:32:42 crc kubenswrapper[4961]: I1205 18:32:42.725903 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_38c3904c-e6f1-4d83-bfbc-d5c39f52d67d/openstack-network-exporter/0.log" Dec 05 18:32:42 crc kubenswrapper[4961]: I1205 18:32:42.742892 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_38c3904c-e6f1-4d83-bfbc-d5c39f52d67d/ovsdbserver-nb/0.log" Dec 05 18:32:42 crc kubenswrapper[4961]: I1205 18:32:42.928183 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_1603a1ba-53e0-4707-a222-392195709f98/openstack-network-exporter/0.log" Dec 05 18:32:42 crc kubenswrapper[4961]: I1205 18:32:42.979967 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_1603a1ba-53e0-4707-a222-392195709f98/ovsdbserver-sb/0.log" Dec 05 18:32:43 crc kubenswrapper[4961]: I1205 18:32:43.110449 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-846ff7879b-wj44p_9e625db8-8bfa-4a00-957c-8a31f781da4f/placement-api/0.log" Dec 05 18:32:43 crc kubenswrapper[4961]: I1205 18:32:43.188495 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_2deb3a6e-b9b0-4e6d-a755-286adb0a3975/setup-container/0.log" Dec 05 18:32:43 crc kubenswrapper[4961]: I1205 18:32:43.198400 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-846ff7879b-wj44p_9e625db8-8bfa-4a00-957c-8a31f781da4f/placement-log/0.log" Dec 05 18:32:43 crc kubenswrapper[4961]: I1205 18:32:43.453804 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_2deb3a6e-b9b0-4e6d-a755-286adb0a3975/rabbitmq/0.log" Dec 05 18:32:43 crc kubenswrapper[4961]: I1205 18:32:43.484607 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_2deb3a6e-b9b0-4e6d-a755-286adb0a3975/setup-container/0.log" Dec 05 18:32:43 crc kubenswrapper[4961]: I1205 18:32:43.529278 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c21abdd4-f06b-4865-8880-0603525e1cb1/setup-container/0.log" Dec 05 18:32:43 crc kubenswrapper[4961]: I1205 18:32:43.667912 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c21abdd4-f06b-4865-8880-0603525e1cb1/setup-container/0.log" Dec 05 18:32:43 crc kubenswrapper[4961]: I1205 18:32:43.702627 4961 log.go:25] "Finished parsing 
log file" path="/var/log/pods/openstack_rabbitmq-server-0_c21abdd4-f06b-4865-8880-0603525e1cb1/rabbitmq/0.log" Dec 05 18:32:43 crc kubenswrapper[4961]: I1205 18:32:43.804531 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8_f2a419b8-744d-4932-a845-d9376364834b/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:32:43 crc kubenswrapper[4961]: I1205 18:32:43.935013 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-5wwq4_c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:32:44 crc kubenswrapper[4961]: I1205 18:32:44.038628 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8_1c42b2d0-4525-4847-a505-a625b88765b9/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:32:44 crc kubenswrapper[4961]: I1205 18:32:44.135704 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-mdm44_53528787-f94b-4255-bb5b-57b8c583eaaf/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:32:44 crc kubenswrapper[4961]: I1205 18:32:44.403557 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-qgg4h_fdec8a7a-22cd-486f-81e5-9a8a7931b9bc/ssh-known-hosts-edpm-deployment/0.log" Dec 05 18:32:44 crc kubenswrapper[4961]: I1205 18:32:44.661984 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5d77d54f6c-4pccf_dcca830e-9231-4c67-b5fa-669102d7ecc8/proxy-server/0.log" Dec 05 18:32:44 crc kubenswrapper[4961]: I1205 18:32:44.700976 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5d77d54f6c-4pccf_dcca830e-9231-4c67-b5fa-669102d7ecc8/proxy-httpd/0.log" Dec 05 18:32:44 crc kubenswrapper[4961]: I1205 18:32:44.778982 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-2mrxc_2b26a944-dad9-45ea-b636-5f2ddaadc80d/swift-ring-rebalance/0.log" Dec 05 18:32:44 crc kubenswrapper[4961]: I1205 18:32:44.871013 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/account-auditor/0.log" Dec 05 18:32:44 crc kubenswrapper[4961]: I1205 18:32:44.931519 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/account-reaper/0.log" Dec 05 18:32:45 crc kubenswrapper[4961]: I1205 18:32:45.116192 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/account-replicator/0.log" Dec 05 18:32:45 crc kubenswrapper[4961]: I1205 18:32:45.132984 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/container-auditor/0.log" Dec 05 18:32:45 crc kubenswrapper[4961]: I1205 18:32:45.166732 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/account-server/0.log" Dec 05 18:32:45 crc kubenswrapper[4961]: I1205 18:32:45.186797 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/container-replicator/0.log" Dec 05 18:32:45 crc kubenswrapper[4961]: I1205 18:32:45.376831 4961 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/container-server/0.log" Dec 05 18:32:45 crc kubenswrapper[4961]: I1205 18:32:45.376880 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/container-updater/0.log" Dec 05 18:32:45 crc kubenswrapper[4961]: I1205 18:32:45.393972 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/object-expirer/0.log" Dec 05 18:32:45 crc kubenswrapper[4961]: I1205 18:32:45.427143 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/object-auditor/0.log" Dec 05 18:32:45 crc kubenswrapper[4961]: I1205 18:32:45.607515 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/object-server/0.log" Dec 05 18:32:45 crc kubenswrapper[4961]: I1205 18:32:45.618734 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/object-replicator/0.log" Dec 05 18:32:45 crc kubenswrapper[4961]: I1205 18:32:45.645894 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/object-updater/0.log" Dec 05 18:32:45 crc kubenswrapper[4961]: I1205 18:32:45.657926 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/rsync/0.log" Dec 05 18:32:45 crc kubenswrapper[4961]: I1205 18:32:45.847760 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/swift-recon-cron/0.log" Dec 05 18:32:45 crc kubenswrapper[4961]: I1205 18:32:45.903546 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq_96c8a8f6-8c0a-4c00-b80e-719556036c4e/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:32:46 crc kubenswrapper[4961]: I1205 18:32:46.186486 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_61480a22-be11-4fc5-83fa-8736dec80842/tempest-tests-tempest-tests-runner/0.log" Dec 05 18:32:46 crc kubenswrapper[4961]: I1205 18:32:46.225200 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_8be5fc0b-4a17-4763-a30c-3d2053d33f29/test-operator-logs-container/0.log" Dec 05 18:32:46 crc kubenswrapper[4961]: I1205 18:32:46.412573 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn_bed15574-a82b-4a31-baa8-8ddfc4a93972/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:32:55 crc kubenswrapper[4961]: I1205 18:32:55.420514 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_4c503456-1649-444c-a321-687b4294d2fa/memcached/0.log" Dec 05 18:32:57 crc kubenswrapper[4961]: I1205 18:32:57.245814 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 18:32:57 crc kubenswrapper[4961]: I1205 18:32:57.246145 4961 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 18:33:09 crc kubenswrapper[4961]: I1205 18:33:09.898700 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq_39126237-6470-4ff9-9dea-d2a7c88a2540/util/0.log" Dec 05 18:33:10 crc kubenswrapper[4961]: I1205 18:33:10.102316 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq_39126237-6470-4ff9-9dea-d2a7c88a2540/util/0.log" Dec 05 18:33:10 crc kubenswrapper[4961]: I1205 18:33:10.124443 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq_39126237-6470-4ff9-9dea-d2a7c88a2540/pull/0.log" Dec 05 18:33:10 crc kubenswrapper[4961]: I1205 18:33:10.144742 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq_39126237-6470-4ff9-9dea-d2a7c88a2540/pull/0.log" Dec 05 18:33:10 crc kubenswrapper[4961]: I1205 18:33:10.306097 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq_39126237-6470-4ff9-9dea-d2a7c88a2540/pull/0.log" Dec 05 18:33:10 crc kubenswrapper[4961]: I1205 18:33:10.320261 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq_39126237-6470-4ff9-9dea-d2a7c88a2540/util/0.log" Dec 05 18:33:10 crc kubenswrapper[4961]: I1205 18:33:10.346419 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq_39126237-6470-4ff9-9dea-d2a7c88a2540/extract/0.log" Dec 05 18:33:10 crc kubenswrapper[4961]: I1205 18:33:10.453137 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-cnqj9_705eb884-eb46-4d59-86ee-c2f1587d5df4/kube-rbac-proxy/0.log" Dec 05 18:33:10 crc kubenswrapper[4961]: I1205 18:33:10.526006 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-nw6x8_406dca34-428b-493b-b564-511542c2bad6/kube-rbac-proxy/0.log" Dec 05 18:33:10 crc kubenswrapper[4961]: I1205 18:33:10.566879 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-cnqj9_705eb884-eb46-4d59-86ee-c2f1587d5df4/manager/0.log" Dec 05 18:33:10 crc kubenswrapper[4961]: I1205 18:33:10.669818 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-nw6x8_406dca34-428b-493b-b564-511542c2bad6/manager/0.log" Dec 05 18:33:10 crc kubenswrapper[4961]: I1205 18:33:10.731712 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-qnmsr_6a4cdfbf-8697-4f8f-9d07-b5aaa5e05991/kube-rbac-proxy/0.log" Dec 05 18:33:10 crc kubenswrapper[4961]: I1205 18:33:10.769065 4961 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-qnmsr_6a4cdfbf-8697-4f8f-9d07-b5aaa5e05991/manager/0.log" Dec 05 18:33:10 crc kubenswrapper[4961]: I1205 18:33:10.904080 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-dlwzz_0b1fd140-fbf5-4d64-950b-b0bdcd07ec54/kube-rbac-proxy/0.log" Dec 05 18:33:10 crc kubenswrapper[4961]: I1205 18:33:10.996475 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-dlwzz_0b1fd140-fbf5-4d64-950b-b0bdcd07ec54/manager/0.log" Dec 05 18:33:11 crc kubenswrapper[4961]: I1205 18:33:11.049118 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-95v4g_4d42ce43-3c27-4007-a20b-e0068beb2490/kube-rbac-proxy/0.log" Dec 05 18:33:11 crc kubenswrapper[4961]: I1205 18:33:11.104078 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-95v4g_4d42ce43-3c27-4007-a20b-e0068beb2490/manager/0.log" Dec 05 18:33:11 crc kubenswrapper[4961]: I1205 18:33:11.155731 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-gd5nk_af9a8d55-8cff-40e2-9f1b-bbd05c3eea80/kube-rbac-proxy/0.log" Dec 05 18:33:11 crc kubenswrapper[4961]: I1205 18:33:11.242834 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-gd5nk_af9a8d55-8cff-40e2-9f1b-bbd05c3eea80/manager/0.log" Dec 05 18:33:11 crc kubenswrapper[4961]: I1205 18:33:11.354723 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-84b9cfc694-lml7r_77f7ec48-3abf-4934-a703-fa3f5edfbd27/kube-rbac-proxy/0.log" Dec 05 18:33:11 crc kubenswrapper[4961]: I1205 18:33:11.509967 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-zjbvv_2bb86b5f-1ee1-48c0-bcc1-60ca583c1339/kube-rbac-proxy/0.log" Dec 05 18:33:11 crc kubenswrapper[4961]: I1205 18:33:11.553459 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-zjbvv_2bb86b5f-1ee1-48c0-bcc1-60ca583c1339/manager/0.log" Dec 05 18:33:11 crc kubenswrapper[4961]: I1205 18:33:11.579608 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-84b9cfc694-lml7r_77f7ec48-3abf-4934-a703-fa3f5edfbd27/manager/0.log" Dec 05 18:33:11 crc kubenswrapper[4961]: I1205 18:33:11.783707 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-bnjfr_0499fdb9-20d5-445c-9ca0-4492287fbcc0/manager/0.log" Dec 05 18:33:11 crc kubenswrapper[4961]: I1205 18:33:11.796155 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-bnjfr_0499fdb9-20d5-445c-9ca0-4492287fbcc0/kube-rbac-proxy/0.log" Dec 05 18:33:11 crc kubenswrapper[4961]: I1205 18:33:11.918317 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-q2p4h_90df3fec-9bc9-48ca-a432-374c1f7e2002/kube-rbac-proxy/0.log" Dec 05 18:33:11 crc kubenswrapper[4961]: I1205 18:33:11.981643 4961 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-q2p4h_90df3fec-9bc9-48ca-a432-374c1f7e2002/manager/0.log" Dec 05 18:33:11 crc kubenswrapper[4961]: I1205 18:33:11.984066 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-4whjc_f7757573-1085-4560-880c-3d9b36ce93f7/kube-rbac-proxy/0.log" Dec 05 18:33:12 crc kubenswrapper[4961]: I1205 18:33:12.092433 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-4whjc_f7757573-1085-4560-880c-3d9b36ce93f7/manager/0.log" Dec 05 18:33:12 crc kubenswrapper[4961]: I1205 18:33:12.145472 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-pszjn_f7ff9bd4-8a05-4a50-b38b-701451107b9f/kube-rbac-proxy/0.log" Dec 05 18:33:12 crc kubenswrapper[4961]: I1205 18:33:12.256054 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-pszjn_f7ff9bd4-8a05-4a50-b38b-701451107b9f/manager/0.log" Dec 05 18:33:12 crc kubenswrapper[4961]: I1205 18:33:12.340737 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-hgv2r_e661480b-d6fd-4c16-9f03-f519092d05c6/kube-rbac-proxy/0.log" Dec 05 18:33:12 crc kubenswrapper[4961]: I1205 18:33:12.405980 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-hgv2r_e661480b-d6fd-4c16-9f03-f519092d05c6/manager/0.log" Dec 05 18:33:12 crc kubenswrapper[4961]: I1205 18:33:12.502512 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-2b6j2_a69e4847-13bc-4c1e-82a9-546fb11ad38d/kube-rbac-proxy/0.log" Dec 05 18:33:12 crc kubenswrapper[4961]: I1205 18:33:12.555138 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-2b6j2_a69e4847-13bc-4c1e-82a9-546fb11ad38d/manager/0.log" Dec 05 18:33:12 crc kubenswrapper[4961]: I1205 18:33:12.649878 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl_ccea0c2d-817c-4895-b8a7-bf852bd12aa9/kube-rbac-proxy/0.log" Dec 05 18:33:12 crc kubenswrapper[4961]: I1205 18:33:12.716169 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl_ccea0c2d-817c-4895-b8a7-bf852bd12aa9/manager/0.log" Dec 05 18:33:13 crc kubenswrapper[4961]: I1205 18:33:13.123363 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-d885c5b7-b9mhp_8359ac81-7e2e-4a86-9052-2cba7e945d40/operator/0.log" Dec 05 18:33:13 crc kubenswrapper[4961]: I1205 18:33:13.137752 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-vblkt_2fdc52dc-265b-42d1-8b82-4e2c0fbccb3b/registry-server/0.log" Dec 05 18:33:13 crc kubenswrapper[4961]: I1205 18:33:13.360534 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-jghkw_8fd2cc5d-67e4-4b9a-9d0c-58993491bc08/kube-rbac-proxy/0.log" Dec 05 18:33:13 crc kubenswrapper[4961]: I1205 18:33:13.411760 4961 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-jghkw_8fd2cc5d-67e4-4b9a-9d0c-58993491bc08/manager/0.log" Dec 05 18:33:13 crc kubenswrapper[4961]: I1205 18:33:13.586713 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-zbcdb_7ff3fbef-1490-4ce9-b350-03a0a7182b78/kube-rbac-proxy/0.log" Dec 05 18:33:13 crc kubenswrapper[4961]: I1205 18:33:13.667334 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-zbcdb_7ff3fbef-1490-4ce9-b350-03a0a7182b78/manager/0.log" Dec 05 18:33:13 crc kubenswrapper[4961]: I1205 18:33:13.805177 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-777bfdfd44-xwwmn_619cb5c4-1a5a-4eb8-ad2d-28615e0dc607/manager/0.log" Dec 05 18:33:13 crc kubenswrapper[4961]: I1205 18:33:13.848934 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-4dbp7_39ba1343-9933-483d-aef2-90e0ceb14c79/operator/0.log" Dec 05 18:33:13 crc kubenswrapper[4961]: I1205 18:33:13.875119 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-c8x5r_767519b7-2067-4fca-a96b-bf9b02e1b273/kube-rbac-proxy/0.log" Dec 05 18:33:13 crc kubenswrapper[4961]: I1205 18:33:13.974645 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-c8x5r_767519b7-2067-4fca-a96b-bf9b02e1b273/manager/0.log" Dec 05 18:33:14 crc kubenswrapper[4961]: I1205 18:33:14.021170 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-dsmtk_0770d71d-11ea-4b63-8a98-31521f395686/kube-rbac-proxy/0.log" Dec 05 18:33:14 crc kubenswrapper[4961]: I1205 18:33:14.130872 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-dsmtk_0770d71d-11ea-4b63-8a98-31521f395686/manager/0.log" Dec 05 18:33:14 crc kubenswrapper[4961]: I1205 18:33:14.192237 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-4k7dd_e27c6cc8-ef18-421e-8a7b-1b6bb2227724/kube-rbac-proxy/0.log" Dec 05 18:33:14 crc kubenswrapper[4961]: I1205 18:33:14.206279 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-4k7dd_e27c6cc8-ef18-421e-8a7b-1b6bb2227724/manager/0.log" Dec 05 18:33:14 crc kubenswrapper[4961]: I1205 18:33:14.295090 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-2jz8d_c567a803-253f-4895-a504-caee7ba37c34/manager/0.log" Dec 05 18:33:14 crc kubenswrapper[4961]: I1205 18:33:14.302658 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-2jz8d_c567a803-253f-4895-a504-caee7ba37c34/kube-rbac-proxy/0.log" Dec 05 18:33:27 crc kubenswrapper[4961]: I1205 18:33:27.246214 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: 
connect: connection refused" start-of-body= Dec 05 18:33:27 crc kubenswrapper[4961]: I1205 18:33:27.246714 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 18:33:31 crc kubenswrapper[4961]: I1205 18:33:31.621765 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-5hmrk_601ba962-a964-472d-b481-4946fd7265b1/control-plane-machine-set-operator/0.log" Dec 05 18:33:31 crc kubenswrapper[4961]: I1205 18:33:31.778010 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-6px8t_475e25de-63ce-4cae-8fc6-4c057d616247/kube-rbac-proxy/0.log" Dec 05 18:33:31 crc kubenswrapper[4961]: I1205 18:33:31.877560 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-6px8t_475e25de-63ce-4cae-8fc6-4c057d616247/machine-api-operator/0.log" Dec 05 18:33:43 crc kubenswrapper[4961]: I1205 18:33:43.794425 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-w62zs_69ce6fc6-0715-4f9c-9c01-3db5dfcbf386/cert-manager-controller/0.log" Dec 05 18:33:43 crc kubenswrapper[4961]: I1205 18:33:43.982360 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-jxh56_3186034b-e42e-4c32-a5ad-942d6bbb0659/cert-manager-cainjector/0.log" Dec 05 18:33:44 crc kubenswrapper[4961]: I1205 18:33:44.028654 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-xzk6r_6a524c2b-8219-4740-b09e-0b855aa04c35/cert-manager-webhook/0.log" Dec 05 18:33:56 crc kubenswrapper[4961]: I1205 18:33:56.100914 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-h4zpz_c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7/nmstate-console-plugin/0.log" Dec 05 18:33:56 crc kubenswrapper[4961]: I1205 18:33:56.277500 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-sp6lw_5593be6a-3351-48eb-ac4b-e34d11ac0b49/nmstate-handler/0.log" Dec 05 18:33:56 crc kubenswrapper[4961]: I1205 18:33:56.367686 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-r585k_60b41a10-29ea-46ca-bc08-8c0473394b19/nmstate-metrics/0.log" Dec 05 18:33:56 crc kubenswrapper[4961]: I1205 18:33:56.371592 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-r585k_60b41a10-29ea-46ca-bc08-8c0473394b19/kube-rbac-proxy/0.log" Dec 05 18:33:56 crc kubenswrapper[4961]: I1205 18:33:56.505356 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-q2qxh_7d46084a-4838-43be-80f5-54ada85ff38f/nmstate-operator/0.log" Dec 05 18:33:56 crc kubenswrapper[4961]: I1205 18:33:56.592314 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-k6qjw_ea22d5fb-23bc-436e-8ab7-80e86c571c28/nmstate-webhook/0.log" Dec 05 18:33:57 crc kubenswrapper[4961]: I1205 18:33:57.245397 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon 
namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 18:33:57 crc kubenswrapper[4961]: I1205 18:33:57.245456 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 18:33:57 crc kubenswrapper[4961]: I1205 18:33:57.245511 4961 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" Dec 05 18:33:57 crc kubenswrapper[4961]: I1205 18:33:57.246361 4961 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828"} pod="openshift-machine-config-operator/machine-config-daemon-4vc27" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Dec 05 18:33:57 crc kubenswrapper[4961]: I1205 18:33:57.246428 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" containerID="cri-o://74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" gracePeriod=600 Dec 05 18:33:57 crc kubenswrapper[4961]: E1205 18:33:57.366610 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:33:57 crc kubenswrapper[4961]: I1205 18:33:57.386089 4961 generic.go:334] "Generic (PLEG): container finished" podID="c048c267-061b-479b-9d63-b3aee093d9f6" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" exitCode=0 Dec 05 18:33:57 crc kubenswrapper[4961]: I1205 18:33:57.386134 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerDied","Data":"74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828"} Dec 05 18:33:57 crc kubenswrapper[4961]: I1205 18:33:57.386169 4961 scope.go:117] "RemoveContainer" containerID="0b84cec64ffa522721ce8551ebbea613d7a1ad484230eb497756106e0f4ee577" Dec 05 18:33:57 crc kubenswrapper[4961]: I1205 18:33:57.387149 4961 scope.go:117] "RemoveContainer" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" Dec 05 18:33:57 crc kubenswrapper[4961]: E1205 18:33:57.387475 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" 
Dec 05 18:34:07 crc kubenswrapper[4961]: I1205 18:34:07.864321 4961 scope.go:117] "RemoveContainer" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" Dec 05 18:34:07 crc kubenswrapper[4961]: E1205 18:34:07.865007 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:34:10 crc kubenswrapper[4961]: I1205 18:34:10.192611 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-k6nwk_3f6c280d-42e8-40a5-b11c-e9a80f81125b/kube-rbac-proxy/0.log" Dec 05 18:34:10 crc kubenswrapper[4961]: I1205 18:34:10.214590 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-k6nwk_3f6c280d-42e8-40a5-b11c-e9a80f81125b/controller/0.log" Dec 05 18:34:10 crc kubenswrapper[4961]: I1205 18:34:10.382490 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/cp-frr-files/0.log" Dec 05 18:34:10 crc kubenswrapper[4961]: I1205 18:34:10.512986 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/cp-frr-files/0.log" Dec 05 18:34:10 crc kubenswrapper[4961]: I1205 18:34:10.529039 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/cp-reloader/0.log" Dec 05 18:34:10 crc kubenswrapper[4961]: I1205 18:34:10.565706 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/cp-metrics/0.log" Dec 05 18:34:10 crc kubenswrapper[4961]: I1205 18:34:10.598704 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/cp-reloader/0.log" Dec 05 18:34:10 crc kubenswrapper[4961]: I1205 18:34:10.759050 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/cp-frr-files/0.log" Dec 05 18:34:10 crc kubenswrapper[4961]: I1205 18:34:10.788472 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/cp-metrics/0.log" Dec 05 18:34:10 crc kubenswrapper[4961]: I1205 18:34:10.793707 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/cp-reloader/0.log" Dec 05 18:34:10 crc kubenswrapper[4961]: I1205 18:34:10.811493 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/cp-metrics/0.log" Dec 05 18:34:10 crc kubenswrapper[4961]: I1205 18:34:10.960844 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/cp-reloader/0.log" Dec 05 18:34:10 crc kubenswrapper[4961]: I1205 18:34:10.978085 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/cp-metrics/0.log" Dec 05 18:34:10 crc kubenswrapper[4961]: I1205 18:34:10.980450 4961 log.go:25] "Finished parsing 
log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/cp-frr-files/0.log" Dec 05 18:34:10 crc kubenswrapper[4961]: I1205 18:34:10.998230 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/controller/0.log" Dec 05 18:34:11 crc kubenswrapper[4961]: I1205 18:34:11.121682 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/frr-metrics/0.log" Dec 05 18:34:11 crc kubenswrapper[4961]: I1205 18:34:11.187345 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/kube-rbac-proxy/0.log" Dec 05 18:34:11 crc kubenswrapper[4961]: I1205 18:34:11.213932 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/kube-rbac-proxy-frr/0.log" Dec 05 18:34:11 crc kubenswrapper[4961]: I1205 18:34:11.354703 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/reloader/0.log" Dec 05 18:34:11 crc kubenswrapper[4961]: I1205 18:34:11.426745 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-gsnw2_013262fd-338d-4a14-89f8-d682d09916f2/frr-k8s-webhook-server/0.log" Dec 05 18:34:11 crc kubenswrapper[4961]: I1205 18:34:11.628608 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-67f59cc659-4mmbn_1dcf69fa-3afe-4fb7-a64b-838dbab83937/manager/0.log" Dec 05 18:34:11 crc kubenswrapper[4961]: I1205 18:34:11.775447 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-7c6bcfcd7d-5ng2j_6139f869-6350-4649-a6fd-a969bf96e18a/webhook-server/0.log" Dec 05 18:34:11 crc kubenswrapper[4961]: I1205 18:34:11.905603 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-pcwhx_b1a0921b-0a54-4163-8931-0b6ef9dd1051/kube-rbac-proxy/0.log" Dec 05 18:34:12 crc kubenswrapper[4961]: I1205 18:34:12.398627 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-pcwhx_b1a0921b-0a54-4163-8931-0b6ef9dd1051/speaker/0.log" Dec 05 18:34:12 crc kubenswrapper[4961]: I1205 18:34:12.535062 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/frr/0.log" Dec 05 18:34:20 crc kubenswrapper[4961]: I1205 18:34:20.863325 4961 scope.go:117] "RemoveContainer" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" Dec 05 18:34:20 crc kubenswrapper[4961]: E1205 18:34:20.864148 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:34:26 crc kubenswrapper[4961]: I1205 18:34:26.837531 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5_3dfe66b7-513e-4e53-854d-b1d0d9ca8acd/util/0.log" Dec 05 18:34:26 crc kubenswrapper[4961]: I1205 18:34:26.949086 
4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5_3dfe66b7-513e-4e53-854d-b1d0d9ca8acd/util/0.log" Dec 05 18:34:26 crc kubenswrapper[4961]: I1205 18:34:26.982205 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5_3dfe66b7-513e-4e53-854d-b1d0d9ca8acd/pull/0.log" Dec 05 18:34:27 crc kubenswrapper[4961]: I1205 18:34:27.023111 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5_3dfe66b7-513e-4e53-854d-b1d0d9ca8acd/pull/0.log" Dec 05 18:34:27 crc kubenswrapper[4961]: I1205 18:34:27.187912 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5_3dfe66b7-513e-4e53-854d-b1d0d9ca8acd/util/0.log" Dec 05 18:34:27 crc kubenswrapper[4961]: I1205 18:34:27.188892 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5_3dfe66b7-513e-4e53-854d-b1d0d9ca8acd/pull/0.log" Dec 05 18:34:27 crc kubenswrapper[4961]: I1205 18:34:27.195308 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5_3dfe66b7-513e-4e53-854d-b1d0d9ca8acd/extract/0.log" Dec 05 18:34:27 crc kubenswrapper[4961]: I1205 18:34:27.778325 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz_9f04f5e2-a474-427b-a466-77d789a6daa7/util/0.log" Dec 05 18:34:27 crc kubenswrapper[4961]: I1205 18:34:27.942323 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz_9f04f5e2-a474-427b-a466-77d789a6daa7/util/0.log" Dec 05 18:34:27 crc kubenswrapper[4961]: I1205 18:34:27.966305 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz_9f04f5e2-a474-427b-a466-77d789a6daa7/pull/0.log" Dec 05 18:34:27 crc kubenswrapper[4961]: I1205 18:34:27.991524 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz_9f04f5e2-a474-427b-a466-77d789a6daa7/pull/0.log" Dec 05 18:34:28 crc kubenswrapper[4961]: I1205 18:34:28.191243 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz_9f04f5e2-a474-427b-a466-77d789a6daa7/extract/0.log" Dec 05 18:34:28 crc kubenswrapper[4961]: I1205 18:34:28.207466 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz_9f04f5e2-a474-427b-a466-77d789a6daa7/pull/0.log" Dec 05 18:34:28 crc kubenswrapper[4961]: I1205 18:34:28.219816 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz_9f04f5e2-a474-427b-a466-77d789a6daa7/util/0.log" Dec 05 18:34:28 crc kubenswrapper[4961]: I1205 18:34:28.355828 4961 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_certified-operators-x247h_9f2bf186-07e1-4212-a88b-377cb9dcc1e4/extract-utilities/0.log" Dec 05 18:34:28 crc kubenswrapper[4961]: I1205 18:34:28.517556 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x247h_9f2bf186-07e1-4212-a88b-377cb9dcc1e4/extract-utilities/0.log" Dec 05 18:34:28 crc kubenswrapper[4961]: I1205 18:34:28.517874 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x247h_9f2bf186-07e1-4212-a88b-377cb9dcc1e4/extract-content/0.log" Dec 05 18:34:28 crc kubenswrapper[4961]: I1205 18:34:28.547969 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x247h_9f2bf186-07e1-4212-a88b-377cb9dcc1e4/extract-content/0.log" Dec 05 18:34:28 crc kubenswrapper[4961]: I1205 18:34:28.674883 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x247h_9f2bf186-07e1-4212-a88b-377cb9dcc1e4/extract-utilities/0.log" Dec 05 18:34:28 crc kubenswrapper[4961]: I1205 18:34:28.695107 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x247h_9f2bf186-07e1-4212-a88b-377cb9dcc1e4/extract-content/0.log" Dec 05 18:34:28 crc kubenswrapper[4961]: I1205 18:34:28.921672 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-crqgk_5186f207-07d2-4325-be3d-3e21a3b5de5e/extract-utilities/0.log" Dec 05 18:34:29 crc kubenswrapper[4961]: I1205 18:34:29.111627 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x247h_9f2bf186-07e1-4212-a88b-377cb9dcc1e4/registry-server/0.log" Dec 05 18:34:29 crc kubenswrapper[4961]: I1205 18:34:29.128812 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-crqgk_5186f207-07d2-4325-be3d-3e21a3b5de5e/extract-utilities/0.log" Dec 05 18:34:29 crc kubenswrapper[4961]: I1205 18:34:29.147670 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-crqgk_5186f207-07d2-4325-be3d-3e21a3b5de5e/extract-content/0.log" Dec 05 18:34:29 crc kubenswrapper[4961]: I1205 18:34:29.156359 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-crqgk_5186f207-07d2-4325-be3d-3e21a3b5de5e/extract-content/0.log" Dec 05 18:34:29 crc kubenswrapper[4961]: I1205 18:34:29.273739 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-crqgk_5186f207-07d2-4325-be3d-3e21a3b5de5e/extract-utilities/0.log" Dec 05 18:34:29 crc kubenswrapper[4961]: I1205 18:34:29.328248 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-crqgk_5186f207-07d2-4325-be3d-3e21a3b5de5e/extract-content/0.log" Dec 05 18:34:29 crc kubenswrapper[4961]: I1205 18:34:29.804496 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-crqgk_5186f207-07d2-4325-be3d-3e21a3b5de5e/registry-server/0.log" Dec 05 18:34:29 crc kubenswrapper[4961]: I1205 18:34:29.932394 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6s9p6_6a05d058-cde3-438c-805f-90d265994736/extract-utilities/0.log" Dec 05 18:34:29 crc kubenswrapper[4961]: I1205 18:34:29.951552 4961 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-kxwjj_bb5a393d-4029-4474-937c-3ddf348254f2/marketplace-operator/0.log" Dec 05 18:34:30 crc kubenswrapper[4961]: I1205 18:34:30.199550 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6s9p6_6a05d058-cde3-438c-805f-90d265994736/extract-utilities/0.log" Dec 05 18:34:30 crc kubenswrapper[4961]: I1205 18:34:30.202093 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6s9p6_6a05d058-cde3-438c-805f-90d265994736/extract-content/0.log" Dec 05 18:34:30 crc kubenswrapper[4961]: I1205 18:34:30.236020 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6s9p6_6a05d058-cde3-438c-805f-90d265994736/extract-content/0.log" Dec 05 18:34:30 crc kubenswrapper[4961]: I1205 18:34:30.369451 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6s9p6_6a05d058-cde3-438c-805f-90d265994736/extract-utilities/0.log" Dec 05 18:34:30 crc kubenswrapper[4961]: I1205 18:34:30.373903 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6s9p6_6a05d058-cde3-438c-805f-90d265994736/extract-content/0.log" Dec 05 18:34:30 crc kubenswrapper[4961]: I1205 18:34:30.490674 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6s9p6_6a05d058-cde3-438c-805f-90d265994736/registry-server/0.log" Dec 05 18:34:30 crc kubenswrapper[4961]: I1205 18:34:30.579788 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9k6x4_4e3eba44-cc06-4bc6-83c2-66fcfde32591/extract-utilities/0.log" Dec 05 18:34:30 crc kubenswrapper[4961]: I1205 18:34:30.756582 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9k6x4_4e3eba44-cc06-4bc6-83c2-66fcfde32591/extract-content/0.log" Dec 05 18:34:30 crc kubenswrapper[4961]: I1205 18:34:30.775304 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9k6x4_4e3eba44-cc06-4bc6-83c2-66fcfde32591/extract-utilities/0.log" Dec 05 18:34:30 crc kubenswrapper[4961]: I1205 18:34:30.786173 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9k6x4_4e3eba44-cc06-4bc6-83c2-66fcfde32591/extract-content/0.log" Dec 05 18:34:30 crc kubenswrapper[4961]: I1205 18:34:30.937566 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9k6x4_4e3eba44-cc06-4bc6-83c2-66fcfde32591/extract-utilities/0.log" Dec 05 18:34:30 crc kubenswrapper[4961]: I1205 18:34:30.979441 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9k6x4_4e3eba44-cc06-4bc6-83c2-66fcfde32591/extract-content/0.log" Dec 05 18:34:31 crc kubenswrapper[4961]: I1205 18:34:31.238428 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9k6x4_4e3eba44-cc06-4bc6-83c2-66fcfde32591/registry-server/0.log" Dec 05 18:34:33 crc kubenswrapper[4961]: I1205 18:34:33.864432 4961 scope.go:117] "RemoveContainer" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" Dec 05 18:34:33 crc kubenswrapper[4961]: E1205 18:34:33.865106 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with 
CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:34:45 crc kubenswrapper[4961]: I1205 18:34:45.863547 4961 scope.go:117] "RemoveContainer" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" Dec 05 18:34:45 crc kubenswrapper[4961]: E1205 18:34:45.864327 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:35:00 crc kubenswrapper[4961]: I1205 18:35:00.864813 4961 scope.go:117] "RemoveContainer" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" Dec 05 18:35:00 crc kubenswrapper[4961]: E1205 18:35:00.865487 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:35:12 crc kubenswrapper[4961]: I1205 18:35:12.868834 4961 scope.go:117] "RemoveContainer" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" Dec 05 18:35:12 crc kubenswrapper[4961]: E1205 18:35:12.870364 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:35:24 crc kubenswrapper[4961]: I1205 18:35:24.870355 4961 scope.go:117] "RemoveContainer" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" Dec 05 18:35:24 crc kubenswrapper[4961]: E1205 18:35:24.871989 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:35:38 crc kubenswrapper[4961]: I1205 18:35:38.864587 4961 scope.go:117] "RemoveContainer" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" Dec 05 18:35:38 crc kubenswrapper[4961]: E1205 18:35:38.865724 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:35:52 crc kubenswrapper[4961]: I1205 18:35:52.870334 4961 scope.go:117] "RemoveContainer" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" Dec 05 18:35:52 crc kubenswrapper[4961]: E1205 18:35:52.871270 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:35:59 crc kubenswrapper[4961]: I1205 18:35:59.558018 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-fpfbc"] Dec 05 18:35:59 crc kubenswrapper[4961]: E1205 18:35:59.559451 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a61f445-54b4-43e8-8810-41e5fcfd2dd0" containerName="extract-utilities" Dec 05 18:35:59 crc kubenswrapper[4961]: I1205 18:35:59.559476 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a61f445-54b4-43e8-8810-41e5fcfd2dd0" containerName="extract-utilities" Dec 05 18:35:59 crc kubenswrapper[4961]: E1205 18:35:59.559504 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a61f445-54b4-43e8-8810-41e5fcfd2dd0" containerName="extract-content" Dec 05 18:35:59 crc kubenswrapper[4961]: I1205 18:35:59.559512 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a61f445-54b4-43e8-8810-41e5fcfd2dd0" containerName="extract-content" Dec 05 18:35:59 crc kubenswrapper[4961]: E1205 18:35:59.559527 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3a61f445-54b4-43e8-8810-41e5fcfd2dd0" containerName="registry-server" Dec 05 18:35:59 crc kubenswrapper[4961]: I1205 18:35:59.559534 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="3a61f445-54b4-43e8-8810-41e5fcfd2dd0" containerName="registry-server" Dec 05 18:35:59 crc kubenswrapper[4961]: E1205 18:35:59.559547 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e4dfb722-6f7e-48bc-bfc9-36150192e418" containerName="container-00" Dec 05 18:35:59 crc kubenswrapper[4961]: I1205 18:35:59.559554 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="e4dfb722-6f7e-48bc-bfc9-36150192e418" containerName="container-00" Dec 05 18:35:59 crc kubenswrapper[4961]: I1205 18:35:59.559738 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="e4dfb722-6f7e-48bc-bfc9-36150192e418" containerName="container-00" Dec 05 18:35:59 crc kubenswrapper[4961]: I1205 18:35:59.559751 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="3a61f445-54b4-43e8-8810-41e5fcfd2dd0" containerName="registry-server" Dec 05 18:35:59 crc kubenswrapper[4961]: I1205 18:35:59.561055 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-fpfbc" Dec 05 18:35:59 crc kubenswrapper[4961]: I1205 18:35:59.577768 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fpfbc"] Dec 05 18:35:59 crc kubenswrapper[4961]: I1205 18:35:59.640382 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25e2f318-2b7b-48e7-aa32-57477c756471-catalog-content\") pod \"certified-operators-fpfbc\" (UID: \"25e2f318-2b7b-48e7-aa32-57477c756471\") " pod="openshift-marketplace/certified-operators-fpfbc" Dec 05 18:35:59 crc kubenswrapper[4961]: I1205 18:35:59.640584 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25e2f318-2b7b-48e7-aa32-57477c756471-utilities\") pod \"certified-operators-fpfbc\" (UID: \"25e2f318-2b7b-48e7-aa32-57477c756471\") " pod="openshift-marketplace/certified-operators-fpfbc" Dec 05 18:35:59 crc kubenswrapper[4961]: I1205 18:35:59.640644 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7m7z\" (UniqueName: \"kubernetes.io/projected/25e2f318-2b7b-48e7-aa32-57477c756471-kube-api-access-h7m7z\") pod \"certified-operators-fpfbc\" (UID: \"25e2f318-2b7b-48e7-aa32-57477c756471\") " pod="openshift-marketplace/certified-operators-fpfbc" Dec 05 18:35:59 crc kubenswrapper[4961]: I1205 18:35:59.742341 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25e2f318-2b7b-48e7-aa32-57477c756471-catalog-content\") pod \"certified-operators-fpfbc\" (UID: \"25e2f318-2b7b-48e7-aa32-57477c756471\") " pod="openshift-marketplace/certified-operators-fpfbc" Dec 05 18:35:59 crc kubenswrapper[4961]: I1205 18:35:59.742480 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25e2f318-2b7b-48e7-aa32-57477c756471-utilities\") pod \"certified-operators-fpfbc\" (UID: \"25e2f318-2b7b-48e7-aa32-57477c756471\") " pod="openshift-marketplace/certified-operators-fpfbc" Dec 05 18:35:59 crc kubenswrapper[4961]: I1205 18:35:59.742512 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7m7z\" (UniqueName: \"kubernetes.io/projected/25e2f318-2b7b-48e7-aa32-57477c756471-kube-api-access-h7m7z\") pod \"certified-operators-fpfbc\" (UID: \"25e2f318-2b7b-48e7-aa32-57477c756471\") " pod="openshift-marketplace/certified-operators-fpfbc" Dec 05 18:35:59 crc kubenswrapper[4961]: I1205 18:35:59.743121 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25e2f318-2b7b-48e7-aa32-57477c756471-utilities\") pod \"certified-operators-fpfbc\" (UID: \"25e2f318-2b7b-48e7-aa32-57477c756471\") " pod="openshift-marketplace/certified-operators-fpfbc" Dec 05 18:35:59 crc kubenswrapper[4961]: I1205 18:35:59.743134 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25e2f318-2b7b-48e7-aa32-57477c756471-catalog-content\") pod \"certified-operators-fpfbc\" (UID: \"25e2f318-2b7b-48e7-aa32-57477c756471\") " pod="openshift-marketplace/certified-operators-fpfbc" Dec 05 18:35:59 crc kubenswrapper[4961]: I1205 18:35:59.763978 4961 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-h7m7z\" (UniqueName: \"kubernetes.io/projected/25e2f318-2b7b-48e7-aa32-57477c756471-kube-api-access-h7m7z\") pod \"certified-operators-fpfbc\" (UID: \"25e2f318-2b7b-48e7-aa32-57477c756471\") " pod="openshift-marketplace/certified-operators-fpfbc" Dec 05 18:35:59 crc kubenswrapper[4961]: I1205 18:35:59.882712 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fpfbc" Dec 05 18:36:00 crc kubenswrapper[4961]: W1205 18:36:00.495592 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod25e2f318_2b7b_48e7_aa32_57477c756471.slice/crio-b508a27eb079919f230f96c1adb42958759545fa6d23f4574bed1a4991584eeb WatchSource:0}: Error finding container b508a27eb079919f230f96c1adb42958759545fa6d23f4574bed1a4991584eeb: Status 404 returned error can't find the container with id b508a27eb079919f230f96c1adb42958759545fa6d23f4574bed1a4991584eeb Dec 05 18:36:00 crc kubenswrapper[4961]: I1205 18:36:00.503422 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-fpfbc"] Dec 05 18:36:00 crc kubenswrapper[4961]: I1205 18:36:00.643073 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fpfbc" event={"ID":"25e2f318-2b7b-48e7-aa32-57477c756471","Type":"ContainerStarted","Data":"b508a27eb079919f230f96c1adb42958759545fa6d23f4574bed1a4991584eeb"} Dec 05 18:36:01 crc kubenswrapper[4961]: I1205 18:36:01.653873 4961 generic.go:334] "Generic (PLEG): container finished" podID="25e2f318-2b7b-48e7-aa32-57477c756471" containerID="56721f4446066f2a9bd54a940b2c51020e40a27658e7a4c7a3e91a3acde8c967" exitCode=0 Dec 05 18:36:01 crc kubenswrapper[4961]: I1205 18:36:01.653924 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fpfbc" event={"ID":"25e2f318-2b7b-48e7-aa32-57477c756471","Type":"ContainerDied","Data":"56721f4446066f2a9bd54a940b2c51020e40a27658e7a4c7a3e91a3acde8c967"} Dec 05 18:36:02 crc kubenswrapper[4961]: I1205 18:36:02.667863 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fpfbc" event={"ID":"25e2f318-2b7b-48e7-aa32-57477c756471","Type":"ContainerStarted","Data":"065cdb1c6b4352546d66b7e664d9d79636ed84c6abdc4fdd52523cf5eb59faf4"} Dec 05 18:36:03 crc kubenswrapper[4961]: I1205 18:36:03.680547 4961 generic.go:334] "Generic (PLEG): container finished" podID="25e2f318-2b7b-48e7-aa32-57477c756471" containerID="065cdb1c6b4352546d66b7e664d9d79636ed84c6abdc4fdd52523cf5eb59faf4" exitCode=0 Dec 05 18:36:03 crc kubenswrapper[4961]: I1205 18:36:03.680612 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fpfbc" event={"ID":"25e2f318-2b7b-48e7-aa32-57477c756471","Type":"ContainerDied","Data":"065cdb1c6b4352546d66b7e664d9d79636ed84c6abdc4fdd52523cf5eb59faf4"} Dec 05 18:36:03 crc kubenswrapper[4961]: I1205 18:36:03.863965 4961 scope.go:117] "RemoveContainer" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" Dec 05 18:36:03 crc kubenswrapper[4961]: E1205 18:36:03.864573 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:36:04 crc kubenswrapper[4961]: I1205 18:36:04.690302 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fpfbc" event={"ID":"25e2f318-2b7b-48e7-aa32-57477c756471","Type":"ContainerStarted","Data":"60d365127bc4f6b1811518e0a5b1dd0c2cc650726a1610611585a791e832cf8d"} Dec 05 18:36:04 crc kubenswrapper[4961]: I1205 18:36:04.720391 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-fpfbc" podStartSLOduration=3.270724339 podStartE2EDuration="5.720364361s" podCreationTimestamp="2025-12-05 18:35:59 +0000 UTC" firstStartedPulling="2025-12-05 18:36:01.656547509 +0000 UTC m=+3767.717697992" lastFinishedPulling="2025-12-05 18:36:04.106187501 +0000 UTC m=+3770.167338014" observedRunningTime="2025-12-05 18:36:04.713920964 +0000 UTC m=+3770.775071517" watchObservedRunningTime="2025-12-05 18:36:04.720364361 +0000 UTC m=+3770.781514844" Dec 05 18:36:09 crc kubenswrapper[4961]: I1205 18:36:09.883193 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-fpfbc" Dec 05 18:36:09 crc kubenswrapper[4961]: I1205 18:36:09.885108 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-fpfbc" Dec 05 18:36:09 crc kubenswrapper[4961]: I1205 18:36:09.954820 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-fpfbc" Dec 05 18:36:10 crc kubenswrapper[4961]: I1205 18:36:10.826521 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-fpfbc" Dec 05 18:36:10 crc kubenswrapper[4961]: I1205 18:36:10.883877 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fpfbc"] Dec 05 18:36:11 crc kubenswrapper[4961]: I1205 18:36:11.792558 4961 generic.go:334] "Generic (PLEG): container finished" podID="6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c" containerID="0a30b434aa558007cc7989252780acc8bf5d18914bc9739be0c324522a445915" exitCode=0 Dec 05 18:36:11 crc kubenswrapper[4961]: I1205 18:36:11.792707 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-ccglg/must-gather-zzwvt" event={"ID":"6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c","Type":"ContainerDied","Data":"0a30b434aa558007cc7989252780acc8bf5d18914bc9739be0c324522a445915"} Dec 05 18:36:11 crc kubenswrapper[4961]: I1205 18:36:11.794166 4961 scope.go:117] "RemoveContainer" containerID="0a30b434aa558007cc7989252780acc8bf5d18914bc9739be0c324522a445915" Dec 05 18:36:12 crc kubenswrapper[4961]: I1205 18:36:12.183621 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-ccglg_must-gather-zzwvt_6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c/gather/0.log" Dec 05 18:36:12 crc kubenswrapper[4961]: I1205 18:36:12.799532 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-fpfbc" podUID="25e2f318-2b7b-48e7-aa32-57477c756471" containerName="registry-server" containerID="cri-o://60d365127bc4f6b1811518e0a5b1dd0c2cc650726a1610611585a791e832cf8d" gracePeriod=2 Dec 05 18:36:13 crc kubenswrapper[4961]: I1205 18:36:13.276312 4961 util.go:48] "No ready 
sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fpfbc" Dec 05 18:36:13 crc kubenswrapper[4961]: I1205 18:36:13.435237 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25e2f318-2b7b-48e7-aa32-57477c756471-catalog-content\") pod \"25e2f318-2b7b-48e7-aa32-57477c756471\" (UID: \"25e2f318-2b7b-48e7-aa32-57477c756471\") " Dec 05 18:36:13 crc kubenswrapper[4961]: I1205 18:36:13.435529 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7m7z\" (UniqueName: \"kubernetes.io/projected/25e2f318-2b7b-48e7-aa32-57477c756471-kube-api-access-h7m7z\") pod \"25e2f318-2b7b-48e7-aa32-57477c756471\" (UID: \"25e2f318-2b7b-48e7-aa32-57477c756471\") " Dec 05 18:36:13 crc kubenswrapper[4961]: I1205 18:36:13.435648 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25e2f318-2b7b-48e7-aa32-57477c756471-utilities\") pod \"25e2f318-2b7b-48e7-aa32-57477c756471\" (UID: \"25e2f318-2b7b-48e7-aa32-57477c756471\") " Dec 05 18:36:13 crc kubenswrapper[4961]: I1205 18:36:13.437584 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25e2f318-2b7b-48e7-aa32-57477c756471-utilities" (OuterVolumeSpecName: "utilities") pod "25e2f318-2b7b-48e7-aa32-57477c756471" (UID: "25e2f318-2b7b-48e7-aa32-57477c756471"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:36:13 crc kubenswrapper[4961]: I1205 18:36:13.460711 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e2f318-2b7b-48e7-aa32-57477c756471-kube-api-access-h7m7z" (OuterVolumeSpecName: "kube-api-access-h7m7z") pod "25e2f318-2b7b-48e7-aa32-57477c756471" (UID: "25e2f318-2b7b-48e7-aa32-57477c756471"). InnerVolumeSpecName "kube-api-access-h7m7z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:36:13 crc kubenswrapper[4961]: I1205 18:36:13.489696 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/25e2f318-2b7b-48e7-aa32-57477c756471-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "25e2f318-2b7b-48e7-aa32-57477c756471" (UID: "25e2f318-2b7b-48e7-aa32-57477c756471"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:36:13 crc kubenswrapper[4961]: I1205 18:36:13.547924 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/25e2f318-2b7b-48e7-aa32-57477c756471-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 18:36:13 crc kubenswrapper[4961]: I1205 18:36:13.547996 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7m7z\" (UniqueName: \"kubernetes.io/projected/25e2f318-2b7b-48e7-aa32-57477c756471-kube-api-access-h7m7z\") on node \"crc\" DevicePath \"\"" Dec 05 18:36:13 crc kubenswrapper[4961]: I1205 18:36:13.548018 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/25e2f318-2b7b-48e7-aa32-57477c756471-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 18:36:13 crc kubenswrapper[4961]: I1205 18:36:13.810953 4961 generic.go:334] "Generic (PLEG): container finished" podID="25e2f318-2b7b-48e7-aa32-57477c756471" containerID="60d365127bc4f6b1811518e0a5b1dd0c2cc650726a1610611585a791e832cf8d" exitCode=0 Dec 05 18:36:13 crc kubenswrapper[4961]: I1205 18:36:13.810999 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fpfbc" event={"ID":"25e2f318-2b7b-48e7-aa32-57477c756471","Type":"ContainerDied","Data":"60d365127bc4f6b1811518e0a5b1dd0c2cc650726a1610611585a791e832cf8d"} Dec 05 18:36:13 crc kubenswrapper[4961]: I1205 18:36:13.811062 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-fpfbc" Dec 05 18:36:13 crc kubenswrapper[4961]: I1205 18:36:13.811084 4961 scope.go:117] "RemoveContainer" containerID="60d365127bc4f6b1811518e0a5b1dd0c2cc650726a1610611585a791e832cf8d" Dec 05 18:36:13 crc kubenswrapper[4961]: I1205 18:36:13.811072 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-fpfbc" event={"ID":"25e2f318-2b7b-48e7-aa32-57477c756471","Type":"ContainerDied","Data":"b508a27eb079919f230f96c1adb42958759545fa6d23f4574bed1a4991584eeb"} Dec 05 18:36:13 crc kubenswrapper[4961]: I1205 18:36:13.845286 4961 scope.go:117] "RemoveContainer" containerID="065cdb1c6b4352546d66b7e664d9d79636ed84c6abdc4fdd52523cf5eb59faf4" Dec 05 18:36:13 crc kubenswrapper[4961]: I1205 18:36:13.856350 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-fpfbc"] Dec 05 18:36:13 crc kubenswrapper[4961]: I1205 18:36:13.872350 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-fpfbc"] Dec 05 18:36:13 crc kubenswrapper[4961]: I1205 18:36:13.876378 4961 scope.go:117] "RemoveContainer" containerID="56721f4446066f2a9bd54a940b2c51020e40a27658e7a4c7a3e91a3acde8c967" Dec 05 18:36:13 crc kubenswrapper[4961]: I1205 18:36:13.915747 4961 scope.go:117] "RemoveContainer" containerID="60d365127bc4f6b1811518e0a5b1dd0c2cc650726a1610611585a791e832cf8d" Dec 05 18:36:13 crc kubenswrapper[4961]: E1205 18:36:13.916153 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"60d365127bc4f6b1811518e0a5b1dd0c2cc650726a1610611585a791e832cf8d\": container with ID starting with 60d365127bc4f6b1811518e0a5b1dd0c2cc650726a1610611585a791e832cf8d not found: ID does not exist" containerID="60d365127bc4f6b1811518e0a5b1dd0c2cc650726a1610611585a791e832cf8d" Dec 05 18:36:13 crc kubenswrapper[4961]: I1205 18:36:13.916193 
4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"60d365127bc4f6b1811518e0a5b1dd0c2cc650726a1610611585a791e832cf8d"} err="failed to get container status \"60d365127bc4f6b1811518e0a5b1dd0c2cc650726a1610611585a791e832cf8d\": rpc error: code = NotFound desc = could not find container \"60d365127bc4f6b1811518e0a5b1dd0c2cc650726a1610611585a791e832cf8d\": container with ID starting with 60d365127bc4f6b1811518e0a5b1dd0c2cc650726a1610611585a791e832cf8d not found: ID does not exist" Dec 05 18:36:13 crc kubenswrapper[4961]: I1205 18:36:13.916220 4961 scope.go:117] "RemoveContainer" containerID="065cdb1c6b4352546d66b7e664d9d79636ed84c6abdc4fdd52523cf5eb59faf4" Dec 05 18:36:13 crc kubenswrapper[4961]: E1205 18:36:13.916636 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"065cdb1c6b4352546d66b7e664d9d79636ed84c6abdc4fdd52523cf5eb59faf4\": container with ID starting with 065cdb1c6b4352546d66b7e664d9d79636ed84c6abdc4fdd52523cf5eb59faf4 not found: ID does not exist" containerID="065cdb1c6b4352546d66b7e664d9d79636ed84c6abdc4fdd52523cf5eb59faf4" Dec 05 18:36:13 crc kubenswrapper[4961]: I1205 18:36:13.916673 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"065cdb1c6b4352546d66b7e664d9d79636ed84c6abdc4fdd52523cf5eb59faf4"} err="failed to get container status \"065cdb1c6b4352546d66b7e664d9d79636ed84c6abdc4fdd52523cf5eb59faf4\": rpc error: code = NotFound desc = could not find container \"065cdb1c6b4352546d66b7e664d9d79636ed84c6abdc4fdd52523cf5eb59faf4\": container with ID starting with 065cdb1c6b4352546d66b7e664d9d79636ed84c6abdc4fdd52523cf5eb59faf4 not found: ID does not exist" Dec 05 18:36:13 crc kubenswrapper[4961]: I1205 18:36:13.916697 4961 scope.go:117] "RemoveContainer" containerID="56721f4446066f2a9bd54a940b2c51020e40a27658e7a4c7a3e91a3acde8c967" Dec 05 18:36:13 crc kubenswrapper[4961]: E1205 18:36:13.917031 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"56721f4446066f2a9bd54a940b2c51020e40a27658e7a4c7a3e91a3acde8c967\": container with ID starting with 56721f4446066f2a9bd54a940b2c51020e40a27658e7a4c7a3e91a3acde8c967 not found: ID does not exist" containerID="56721f4446066f2a9bd54a940b2c51020e40a27658e7a4c7a3e91a3acde8c967" Dec 05 18:36:13 crc kubenswrapper[4961]: I1205 18:36:13.917056 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"56721f4446066f2a9bd54a940b2c51020e40a27658e7a4c7a3e91a3acde8c967"} err="failed to get container status \"56721f4446066f2a9bd54a940b2c51020e40a27658e7a4c7a3e91a3acde8c967\": rpc error: code = NotFound desc = could not find container \"56721f4446066f2a9bd54a940b2c51020e40a27658e7a4c7a3e91a3acde8c967\": container with ID starting with 56721f4446066f2a9bd54a940b2c51020e40a27658e7a4c7a3e91a3acde8c967 not found: ID does not exist" Dec 05 18:36:14 crc kubenswrapper[4961]: I1205 18:36:14.874984 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e2f318-2b7b-48e7-aa32-57477c756471" path="/var/lib/kubelet/pods/25e2f318-2b7b-48e7-aa32-57477c756471/volumes" Dec 05 18:36:16 crc kubenswrapper[4961]: I1205 18:36:16.863871 4961 scope.go:117] "RemoveContainer" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" Dec 05 18:36:16 crc kubenswrapper[4961]: E1205 18:36:16.864329 4961 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:36:20 crc kubenswrapper[4961]: I1205 18:36:20.452027 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-ccglg/must-gather-zzwvt"] Dec 05 18:36:20 crc kubenswrapper[4961]: I1205 18:36:20.452853 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-ccglg/must-gather-zzwvt" podUID="6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c" containerName="copy" containerID="cri-o://5cd4510b9488d1a4b6aedea3941c2fe5848abf743f50b927f59a5711c301a8e2" gracePeriod=2 Dec 05 18:36:20 crc kubenswrapper[4961]: I1205 18:36:20.463408 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-ccglg/must-gather-zzwvt"] Dec 05 18:36:20 crc kubenswrapper[4961]: I1205 18:36:20.883503 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-ccglg_must-gather-zzwvt_6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c/copy/0.log" Dec 05 18:36:20 crc kubenswrapper[4961]: I1205 18:36:20.884324 4961 generic.go:334] "Generic (PLEG): container finished" podID="6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c" containerID="5cd4510b9488d1a4b6aedea3941c2fe5848abf743f50b927f59a5711c301a8e2" exitCode=143 Dec 05 18:36:20 crc kubenswrapper[4961]: I1205 18:36:20.884374 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95b0b3cf7ff24787e76cfa6ab8a1827456dac2cad2b590fa51a50234eee4da19" Dec 05 18:36:20 crc kubenswrapper[4961]: I1205 18:36:20.962675 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-ccglg_must-gather-zzwvt_6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c/copy/0.log" Dec 05 18:36:20 crc kubenswrapper[4961]: I1205 18:36:20.963136 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-ccglg/must-gather-zzwvt" Dec 05 18:36:21 crc kubenswrapper[4961]: I1205 18:36:21.092505 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c-must-gather-output\") pod \"6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c\" (UID: \"6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c\") " Dec 05 18:36:21 crc kubenswrapper[4961]: I1205 18:36:21.092686 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s6hxz\" (UniqueName: \"kubernetes.io/projected/6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c-kube-api-access-s6hxz\") pod \"6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c\" (UID: \"6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c\") " Dec 05 18:36:21 crc kubenswrapper[4961]: I1205 18:36:21.106138 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c-kube-api-access-s6hxz" (OuterVolumeSpecName: "kube-api-access-s6hxz") pod "6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c" (UID: "6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c"). InnerVolumeSpecName "kube-api-access-s6hxz". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:36:21 crc kubenswrapper[4961]: I1205 18:36:21.195893 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s6hxz\" (UniqueName: \"kubernetes.io/projected/6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c-kube-api-access-s6hxz\") on node \"crc\" DevicePath \"\"" Dec 05 18:36:21 crc kubenswrapper[4961]: I1205 18:36:21.241701 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c" (UID: "6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:36:21 crc kubenswrapper[4961]: I1205 18:36:21.297418 4961 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c-must-gather-output\") on node \"crc\" DevicePath \"\"" Dec 05 18:36:21 crc kubenswrapper[4961]: I1205 18:36:21.892403 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-ccglg/must-gather-zzwvt" Dec 05 18:36:22 crc kubenswrapper[4961]: I1205 18:36:22.878668 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c" path="/var/lib/kubelet/pods/6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c/volumes" Dec 05 18:36:30 crc kubenswrapper[4961]: I1205 18:36:30.864060 4961 scope.go:117] "RemoveContainer" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" Dec 05 18:36:30 crc kubenswrapper[4961]: E1205 18:36:30.864727 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:36:41 crc kubenswrapper[4961]: I1205 18:36:41.863727 4961 scope.go:117] "RemoveContainer" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" Dec 05 18:36:41 crc kubenswrapper[4961]: E1205 18:36:41.864554 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:36:46 crc kubenswrapper[4961]: I1205 18:36:46.088766 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2xxct"] Dec 05 18:36:46 crc kubenswrapper[4961]: E1205 18:36:46.089961 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c" containerName="copy" Dec 05 18:36:46 crc kubenswrapper[4961]: I1205 18:36:46.089976 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c" containerName="copy" Dec 05 18:36:46 crc kubenswrapper[4961]: E1205 18:36:46.090002 4961 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c" containerName="gather" Dec 05 18:36:46 crc kubenswrapper[4961]: I1205 18:36:46.090008 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c" containerName="gather" Dec 05 18:36:46 crc kubenswrapper[4961]: E1205 18:36:46.090016 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25e2f318-2b7b-48e7-aa32-57477c756471" containerName="extract-utilities" Dec 05 18:36:46 crc kubenswrapper[4961]: I1205 18:36:46.090024 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="25e2f318-2b7b-48e7-aa32-57477c756471" containerName="extract-utilities" Dec 05 18:36:46 crc kubenswrapper[4961]: E1205 18:36:46.093668 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25e2f318-2b7b-48e7-aa32-57477c756471" containerName="registry-server" Dec 05 18:36:46 crc kubenswrapper[4961]: I1205 18:36:46.093689 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="25e2f318-2b7b-48e7-aa32-57477c756471" containerName="registry-server" Dec 05 18:36:46 crc kubenswrapper[4961]: E1205 18:36:46.093730 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="25e2f318-2b7b-48e7-aa32-57477c756471" containerName="extract-content" Dec 05 18:36:46 crc kubenswrapper[4961]: I1205 18:36:46.093737 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="25e2f318-2b7b-48e7-aa32-57477c756471" containerName="extract-content" Dec 05 18:36:46 crc kubenswrapper[4961]: I1205 18:36:46.094042 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c" containerName="copy" Dec 05 18:36:46 crc kubenswrapper[4961]: I1205 18:36:46.094067 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="6fa1d31b-a8a6-4bc5-acc3-ae1218fa144c" containerName="gather" Dec 05 18:36:46 crc kubenswrapper[4961]: I1205 18:36:46.094087 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="25e2f318-2b7b-48e7-aa32-57477c756471" containerName="registry-server" Dec 05 18:36:46 crc kubenswrapper[4961]: I1205 18:36:46.095464 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-2xxct" Dec 05 18:36:46 crc kubenswrapper[4961]: I1205 18:36:46.111939 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2xxct"] Dec 05 18:36:46 crc kubenswrapper[4961]: I1205 18:36:46.217386 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6nf9\" (UniqueName: \"kubernetes.io/projected/e334c4db-efe0-4d34-84c1-a9bc5e2a57e5-kube-api-access-c6nf9\") pod \"community-operators-2xxct\" (UID: \"e334c4db-efe0-4d34-84c1-a9bc5e2a57e5\") " pod="openshift-marketplace/community-operators-2xxct" Dec 05 18:36:46 crc kubenswrapper[4961]: I1205 18:36:46.217574 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e334c4db-efe0-4d34-84c1-a9bc5e2a57e5-catalog-content\") pod \"community-operators-2xxct\" (UID: \"e334c4db-efe0-4d34-84c1-a9bc5e2a57e5\") " pod="openshift-marketplace/community-operators-2xxct" Dec 05 18:36:46 crc kubenswrapper[4961]: I1205 18:36:46.217664 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e334c4db-efe0-4d34-84c1-a9bc5e2a57e5-utilities\") pod \"community-operators-2xxct\" (UID: \"e334c4db-efe0-4d34-84c1-a9bc5e2a57e5\") " pod="openshift-marketplace/community-operators-2xxct" Dec 05 18:36:46 crc kubenswrapper[4961]: I1205 18:36:46.319198 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e334c4db-efe0-4d34-84c1-a9bc5e2a57e5-catalog-content\") pod \"community-operators-2xxct\" (UID: \"e334c4db-efe0-4d34-84c1-a9bc5e2a57e5\") " pod="openshift-marketplace/community-operators-2xxct" Dec 05 18:36:46 crc kubenswrapper[4961]: I1205 18:36:46.319543 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e334c4db-efe0-4d34-84c1-a9bc5e2a57e5-utilities\") pod \"community-operators-2xxct\" (UID: \"e334c4db-efe0-4d34-84c1-a9bc5e2a57e5\") " pod="openshift-marketplace/community-operators-2xxct" Dec 05 18:36:46 crc kubenswrapper[4961]: I1205 18:36:46.319679 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6nf9\" (UniqueName: \"kubernetes.io/projected/e334c4db-efe0-4d34-84c1-a9bc5e2a57e5-kube-api-access-c6nf9\") pod \"community-operators-2xxct\" (UID: \"e334c4db-efe0-4d34-84c1-a9bc5e2a57e5\") " pod="openshift-marketplace/community-operators-2xxct" Dec 05 18:36:46 crc kubenswrapper[4961]: I1205 18:36:46.320329 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e334c4db-efe0-4d34-84c1-a9bc5e2a57e5-catalog-content\") pod \"community-operators-2xxct\" (UID: \"e334c4db-efe0-4d34-84c1-a9bc5e2a57e5\") " pod="openshift-marketplace/community-operators-2xxct" Dec 05 18:36:46 crc kubenswrapper[4961]: I1205 18:36:46.320418 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e334c4db-efe0-4d34-84c1-a9bc5e2a57e5-utilities\") pod \"community-operators-2xxct\" (UID: \"e334c4db-efe0-4d34-84c1-a9bc5e2a57e5\") " pod="openshift-marketplace/community-operators-2xxct" Dec 05 18:36:46 crc kubenswrapper[4961]: I1205 18:36:46.352766 4961 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-c6nf9\" (UniqueName: \"kubernetes.io/projected/e334c4db-efe0-4d34-84c1-a9bc5e2a57e5-kube-api-access-c6nf9\") pod \"community-operators-2xxct\" (UID: \"e334c4db-efe0-4d34-84c1-a9bc5e2a57e5\") " pod="openshift-marketplace/community-operators-2xxct" Dec 05 18:36:46 crc kubenswrapper[4961]: I1205 18:36:46.429741 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2xxct" Dec 05 18:36:47 crc kubenswrapper[4961]: I1205 18:36:46.999960 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2xxct"] Dec 05 18:36:47 crc kubenswrapper[4961]: I1205 18:36:47.171563 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2xxct" event={"ID":"e334c4db-efe0-4d34-84c1-a9bc5e2a57e5","Type":"ContainerStarted","Data":"cbb79ea394d0307c0af578f0a884358bb9e80e05333b9226d6fbf7ad50b0cb81"} Dec 05 18:36:48 crc kubenswrapper[4961]: I1205 18:36:48.181860 4961 generic.go:334] "Generic (PLEG): container finished" podID="e334c4db-efe0-4d34-84c1-a9bc5e2a57e5" containerID="777d5a353f93b17b6c1cf489a7617ea507de0c6315bc15b0d34fb2191eb77cbe" exitCode=0 Dec 05 18:36:48 crc kubenswrapper[4961]: I1205 18:36:48.181968 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2xxct" event={"ID":"e334c4db-efe0-4d34-84c1-a9bc5e2a57e5","Type":"ContainerDied","Data":"777d5a353f93b17b6c1cf489a7617ea507de0c6315bc15b0d34fb2191eb77cbe"} Dec 05 18:36:48 crc kubenswrapper[4961]: I1205 18:36:48.183808 4961 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Dec 05 18:36:52 crc kubenswrapper[4961]: I1205 18:36:52.226223 4961 generic.go:334] "Generic (PLEG): container finished" podID="e334c4db-efe0-4d34-84c1-a9bc5e2a57e5" containerID="d73cc727ac302c4349729f803c2fae2d0a644a5f9dc3ebeb1c9e88db9e0ba7ae" exitCode=0 Dec 05 18:36:52 crc kubenswrapper[4961]: I1205 18:36:52.226336 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2xxct" event={"ID":"e334c4db-efe0-4d34-84c1-a9bc5e2a57e5","Type":"ContainerDied","Data":"d73cc727ac302c4349729f803c2fae2d0a644a5f9dc3ebeb1c9e88db9e0ba7ae"} Dec 05 18:36:53 crc kubenswrapper[4961]: I1205 18:36:53.241353 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2xxct" event={"ID":"e334c4db-efe0-4d34-84c1-a9bc5e2a57e5","Type":"ContainerStarted","Data":"e3b64c950d99c1b3fda554bde433ed1648a4497db90467b257f0366cc26ba747"} Dec 05 18:36:53 crc kubenswrapper[4961]: I1205 18:36:53.270233 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2xxct" podStartSLOduration=2.73010236 podStartE2EDuration="7.270203853s" podCreationTimestamp="2025-12-05 18:36:46 +0000 UTC" firstStartedPulling="2025-12-05 18:36:48.183389748 +0000 UTC m=+3814.244540241" lastFinishedPulling="2025-12-05 18:36:52.723491261 +0000 UTC m=+3818.784641734" observedRunningTime="2025-12-05 18:36:53.259248096 +0000 UTC m=+3819.320398619" watchObservedRunningTime="2025-12-05 18:36:53.270203853 +0000 UTC m=+3819.331354346" Dec 05 18:36:53 crc kubenswrapper[4961]: I1205 18:36:53.864069 4961 scope.go:117] "RemoveContainer" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" Dec 05 18:36:53 crc kubenswrapper[4961]: E1205 18:36:53.864883 4961 pod_workers.go:1301] "Error syncing 
pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:36:56 crc kubenswrapper[4961]: I1205 18:36:56.430462 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2xxct" Dec 05 18:36:56 crc kubenswrapper[4961]: I1205 18:36:56.430845 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2xxct" Dec 05 18:36:56 crc kubenswrapper[4961]: I1205 18:36:56.500398 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2xxct" Dec 05 18:36:57 crc kubenswrapper[4961]: I1205 18:36:57.335838 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2xxct" Dec 05 18:36:57 crc kubenswrapper[4961]: I1205 18:36:57.882932 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2xxct"] Dec 05 18:36:57 crc kubenswrapper[4961]: I1205 18:36:57.940030 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-crqgk"] Dec 05 18:36:57 crc kubenswrapper[4961]: I1205 18:36:57.940425 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-crqgk" podUID="5186f207-07d2-4325-be3d-3e21a3b5de5e" containerName="registry-server" containerID="cri-o://d5ce64c83d47e4cbe5cdcc6fbfeb0afcdb53f29d6e2c1a0464c3206e87c405c8" gracePeriod=2 Dec 05 18:36:58 crc kubenswrapper[4961]: I1205 18:36:58.313961 4961 generic.go:334] "Generic (PLEG): container finished" podID="5186f207-07d2-4325-be3d-3e21a3b5de5e" containerID="d5ce64c83d47e4cbe5cdcc6fbfeb0afcdb53f29d6e2c1a0464c3206e87c405c8" exitCode=0 Dec 05 18:36:58 crc kubenswrapper[4961]: I1205 18:36:58.315078 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-crqgk" event={"ID":"5186f207-07d2-4325-be3d-3e21a3b5de5e","Type":"ContainerDied","Data":"d5ce64c83d47e4cbe5cdcc6fbfeb0afcdb53f29d6e2c1a0464c3206e87c405c8"} Dec 05 18:36:58 crc kubenswrapper[4961]: I1205 18:36:58.428884 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-crqgk" Dec 05 18:36:58 crc kubenswrapper[4961]: I1205 18:36:58.600833 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5186f207-07d2-4325-be3d-3e21a3b5de5e-utilities\") pod \"5186f207-07d2-4325-be3d-3e21a3b5de5e\" (UID: \"5186f207-07d2-4325-be3d-3e21a3b5de5e\") " Dec 05 18:36:58 crc kubenswrapper[4961]: I1205 18:36:58.601619 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5186f207-07d2-4325-be3d-3e21a3b5de5e-catalog-content\") pod \"5186f207-07d2-4325-be3d-3e21a3b5de5e\" (UID: \"5186f207-07d2-4325-be3d-3e21a3b5de5e\") " Dec 05 18:36:58 crc kubenswrapper[4961]: I1205 18:36:58.601887 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8d7fz\" (UniqueName: \"kubernetes.io/projected/5186f207-07d2-4325-be3d-3e21a3b5de5e-kube-api-access-8d7fz\") pod \"5186f207-07d2-4325-be3d-3e21a3b5de5e\" (UID: \"5186f207-07d2-4325-be3d-3e21a3b5de5e\") " Dec 05 18:36:58 crc kubenswrapper[4961]: I1205 18:36:58.602104 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5186f207-07d2-4325-be3d-3e21a3b5de5e-utilities" (OuterVolumeSpecName: "utilities") pod "5186f207-07d2-4325-be3d-3e21a3b5de5e" (UID: "5186f207-07d2-4325-be3d-3e21a3b5de5e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:36:58 crc kubenswrapper[4961]: I1205 18:36:58.602518 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5186f207-07d2-4325-be3d-3e21a3b5de5e-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 18:36:58 crc kubenswrapper[4961]: I1205 18:36:58.608694 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5186f207-07d2-4325-be3d-3e21a3b5de5e-kube-api-access-8d7fz" (OuterVolumeSpecName: "kube-api-access-8d7fz") pod "5186f207-07d2-4325-be3d-3e21a3b5de5e" (UID: "5186f207-07d2-4325-be3d-3e21a3b5de5e"). InnerVolumeSpecName "kube-api-access-8d7fz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:36:58 crc kubenswrapper[4961]: I1205 18:36:58.661010 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5186f207-07d2-4325-be3d-3e21a3b5de5e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5186f207-07d2-4325-be3d-3e21a3b5de5e" (UID: "5186f207-07d2-4325-be3d-3e21a3b5de5e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:36:58 crc kubenswrapper[4961]: I1205 18:36:58.704484 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5186f207-07d2-4325-be3d-3e21a3b5de5e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 18:36:58 crc kubenswrapper[4961]: I1205 18:36:58.704519 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8d7fz\" (UniqueName: \"kubernetes.io/projected/5186f207-07d2-4325-be3d-3e21a3b5de5e-kube-api-access-8d7fz\") on node \"crc\" DevicePath \"\"" Dec 05 18:36:59 crc kubenswrapper[4961]: I1205 18:36:59.329217 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-crqgk" event={"ID":"5186f207-07d2-4325-be3d-3e21a3b5de5e","Type":"ContainerDied","Data":"b77c2d822d76a92e8b1079ecb7cd8375f3826c1d0ad083590779c6a991e5f99f"} Dec 05 18:36:59 crc kubenswrapper[4961]: I1205 18:36:59.329257 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-crqgk" Dec 05 18:36:59 crc kubenswrapper[4961]: I1205 18:36:59.329636 4961 scope.go:117] "RemoveContainer" containerID="d5ce64c83d47e4cbe5cdcc6fbfeb0afcdb53f29d6e2c1a0464c3206e87c405c8" Dec 05 18:36:59 crc kubenswrapper[4961]: I1205 18:36:59.358977 4961 scope.go:117] "RemoveContainer" containerID="e01da39d74981bef76d6832036d0eedbbcbbf26ffc6bf9a8d93d1442bd2c84b1" Dec 05 18:36:59 crc kubenswrapper[4961]: I1205 18:36:59.363046 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-crqgk"] Dec 05 18:36:59 crc kubenswrapper[4961]: I1205 18:36:59.371876 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-crqgk"] Dec 05 18:36:59 crc kubenswrapper[4961]: I1205 18:36:59.382499 4961 scope.go:117] "RemoveContainer" containerID="6c839e9c03d90d0d811aa453985a1cae2f7da682332a3b47c059f84c51cb2927" Dec 05 18:37:00 crc kubenswrapper[4961]: I1205 18:37:00.882526 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5186f207-07d2-4325-be3d-3e21a3b5de5e" path="/var/lib/kubelet/pods/5186f207-07d2-4325-be3d-3e21a3b5de5e/volumes" Dec 05 18:37:05 crc kubenswrapper[4961]: I1205 18:37:05.864062 4961 scope.go:117] "RemoveContainer" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" Dec 05 18:37:05 crc kubenswrapper[4961]: E1205 18:37:05.865454 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:37:16 crc kubenswrapper[4961]: I1205 18:37:16.864199 4961 scope.go:117] "RemoveContainer" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" Dec 05 18:37:16 crc kubenswrapper[4961]: E1205 18:37:16.865093 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:37:30 crc kubenswrapper[4961]: I1205 18:37:30.863801 4961 scope.go:117] "RemoveContainer" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" Dec 05 18:37:30 crc kubenswrapper[4961]: E1205 18:37:30.864531 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:37:41 crc kubenswrapper[4961]: I1205 18:37:41.863848 4961 scope.go:117] "RemoveContainer" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" Dec 05 18:37:41 crc kubenswrapper[4961]: E1205 18:37:41.864893 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:37:45 crc kubenswrapper[4961]: I1205 18:37:45.482240 4961 scope.go:117] "RemoveContainer" containerID="5cd4510b9488d1a4b6aedea3941c2fe5848abf743f50b927f59a5711c301a8e2" Dec 05 18:37:45 crc kubenswrapper[4961]: I1205 18:37:45.511593 4961 scope.go:117] "RemoveContainer" containerID="02caa040780271ac3a7df31143866f5b313b38acdbba195c884fd7dc51d3594c" Dec 05 18:37:45 crc kubenswrapper[4961]: I1205 18:37:45.560801 4961 scope.go:117] "RemoveContainer" containerID="0a30b434aa558007cc7989252780acc8bf5d18914bc9739be0c324522a445915" Dec 05 18:37:54 crc kubenswrapper[4961]: I1205 18:37:54.872114 4961 scope.go:117] "RemoveContainer" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" Dec 05 18:37:54 crc kubenswrapper[4961]: E1205 18:37:54.873120 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:38:08 crc kubenswrapper[4961]: I1205 18:38:08.863639 4961 scope.go:117] "RemoveContainer" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" Dec 05 18:38:08 crc kubenswrapper[4961]: E1205 18:38:08.864489 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:38:23 crc kubenswrapper[4961]: I1205 18:38:23.864220 4961 scope.go:117] "RemoveContainer" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" Dec 05 18:38:23 crc 
kubenswrapper[4961]: E1205 18:38:23.864998 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:38:38 crc kubenswrapper[4961]: I1205 18:38:38.864607 4961 scope.go:117] "RemoveContainer" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" Dec 05 18:38:38 crc kubenswrapper[4961]: E1205 18:38:38.865928 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:38:45 crc kubenswrapper[4961]: I1205 18:38:45.723556 4961 scope.go:117] "RemoveContainer" containerID="9f9b6989a7365371e805afad607ad815fdd2668f1bf33e5bfb08bb410a94390b" Dec 05 18:38:50 crc kubenswrapper[4961]: I1205 18:38:50.864242 4961 scope.go:117] "RemoveContainer" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" Dec 05 18:38:50 crc kubenswrapper[4961]: E1205 18:38:50.866586 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:39:03 crc kubenswrapper[4961]: I1205 18:39:03.865624 4961 scope.go:117] "RemoveContainer" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828" Dec 05 18:39:04 crc kubenswrapper[4961]: I1205 18:39:04.177798 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerStarted","Data":"f1a110177a9eeba85f570ef0c42f98bf4014c9fcc0be0c4a67f6cf96d7337c8c"} Dec 05 18:39:22 crc kubenswrapper[4961]: I1205 18:39:22.756401 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-vl9lg/must-gather-24cfw"] Dec 05 18:39:22 crc kubenswrapper[4961]: E1205 18:39:22.757317 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5186f207-07d2-4325-be3d-3e21a3b5de5e" containerName="registry-server" Dec 05 18:39:22 crc kubenswrapper[4961]: I1205 18:39:22.757331 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="5186f207-07d2-4325-be3d-3e21a3b5de5e" containerName="registry-server" Dec 05 18:39:22 crc kubenswrapper[4961]: E1205 18:39:22.757346 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5186f207-07d2-4325-be3d-3e21a3b5de5e" containerName="extract-utilities" Dec 05 18:39:22 crc kubenswrapper[4961]: I1205 18:39:22.757355 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="5186f207-07d2-4325-be3d-3e21a3b5de5e" containerName="extract-utilities" Dec 05 18:39:22 crc kubenswrapper[4961]: E1205 18:39:22.757368 4961 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5186f207-07d2-4325-be3d-3e21a3b5de5e" containerName="extract-content" Dec 05 18:39:22 crc kubenswrapper[4961]: I1205 18:39:22.757374 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="5186f207-07d2-4325-be3d-3e21a3b5de5e" containerName="extract-content" Dec 05 18:39:22 crc kubenswrapper[4961]: I1205 18:39:22.757566 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="5186f207-07d2-4325-be3d-3e21a3b5de5e" containerName="registry-server" Dec 05 18:39:22 crc kubenswrapper[4961]: I1205 18:39:22.758612 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-vl9lg/must-gather-24cfw" Dec 05 18:39:22 crc kubenswrapper[4961]: I1205 18:39:22.761999 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-vl9lg"/"openshift-service-ca.crt" Dec 05 18:39:22 crc kubenswrapper[4961]: I1205 18:39:22.762202 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-vl9lg"/"kube-root-ca.crt" Dec 05 18:39:22 crc kubenswrapper[4961]: I1205 18:39:22.782538 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-vl9lg/must-gather-24cfw"] Dec 05 18:39:22 crc kubenswrapper[4961]: I1205 18:39:22.829341 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/c7e452cb-e813-4aeb-99f2-bbfa334ecdc2-must-gather-output\") pod \"must-gather-24cfw\" (UID: \"c7e452cb-e813-4aeb-99f2-bbfa334ecdc2\") " pod="openshift-must-gather-vl9lg/must-gather-24cfw" Dec 05 18:39:22 crc kubenswrapper[4961]: I1205 18:39:22.829384 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58mgh\" (UniqueName: \"kubernetes.io/projected/c7e452cb-e813-4aeb-99f2-bbfa334ecdc2-kube-api-access-58mgh\") pod \"must-gather-24cfw\" (UID: \"c7e452cb-e813-4aeb-99f2-bbfa334ecdc2\") " pod="openshift-must-gather-vl9lg/must-gather-24cfw" Dec 05 18:39:22 crc kubenswrapper[4961]: I1205 18:39:22.937152 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/c7e452cb-e813-4aeb-99f2-bbfa334ecdc2-must-gather-output\") pod \"must-gather-24cfw\" (UID: \"c7e452cb-e813-4aeb-99f2-bbfa334ecdc2\") " pod="openshift-must-gather-vl9lg/must-gather-24cfw" Dec 05 18:39:22 crc kubenswrapper[4961]: I1205 18:39:22.937207 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58mgh\" (UniqueName: \"kubernetes.io/projected/c7e452cb-e813-4aeb-99f2-bbfa334ecdc2-kube-api-access-58mgh\") pod \"must-gather-24cfw\" (UID: \"c7e452cb-e813-4aeb-99f2-bbfa334ecdc2\") " pod="openshift-must-gather-vl9lg/must-gather-24cfw" Dec 05 18:39:22 crc kubenswrapper[4961]: I1205 18:39:22.939735 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/c7e452cb-e813-4aeb-99f2-bbfa334ecdc2-must-gather-output\") pod \"must-gather-24cfw\" (UID: \"c7e452cb-e813-4aeb-99f2-bbfa334ecdc2\") " pod="openshift-must-gather-vl9lg/must-gather-24cfw" Dec 05 18:39:22 crc kubenswrapper[4961]: I1205 18:39:22.975228 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58mgh\" (UniqueName: \"kubernetes.io/projected/c7e452cb-e813-4aeb-99f2-bbfa334ecdc2-kube-api-access-58mgh\") pod 
\"must-gather-24cfw\" (UID: \"c7e452cb-e813-4aeb-99f2-bbfa334ecdc2\") " pod="openshift-must-gather-vl9lg/must-gather-24cfw" Dec 05 18:39:23 crc kubenswrapper[4961]: I1205 18:39:23.084823 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-vl9lg/must-gather-24cfw" Dec 05 18:39:23 crc kubenswrapper[4961]: I1205 18:39:23.556328 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-vl9lg/must-gather-24cfw"] Dec 05 18:39:23 crc kubenswrapper[4961]: W1205 18:39:23.563010 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc7e452cb_e813_4aeb_99f2_bbfa334ecdc2.slice/crio-a3f2d977f6297e21926bd46ad72f92f00dc8c437329ce84f0cb21038b84cbc7f WatchSource:0}: Error finding container a3f2d977f6297e21926bd46ad72f92f00dc8c437329ce84f0cb21038b84cbc7f: Status 404 returned error can't find the container with id a3f2d977f6297e21926bd46ad72f92f00dc8c437329ce84f0cb21038b84cbc7f Dec 05 18:39:24 crc kubenswrapper[4961]: I1205 18:39:24.392935 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vl9lg/must-gather-24cfw" event={"ID":"c7e452cb-e813-4aeb-99f2-bbfa334ecdc2","Type":"ContainerStarted","Data":"060db01dc538de0527023d8526589d211ccaa1305b1d783b35fcac34c861786c"} Dec 05 18:39:24 crc kubenswrapper[4961]: I1205 18:39:24.394346 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vl9lg/must-gather-24cfw" event={"ID":"c7e452cb-e813-4aeb-99f2-bbfa334ecdc2","Type":"ContainerStarted","Data":"fbba0665853debd4ea2a7453944a75ac47f2e01fdd671bd3b0277286ae16d0ef"} Dec 05 18:39:24 crc kubenswrapper[4961]: I1205 18:39:24.394430 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vl9lg/must-gather-24cfw" event={"ID":"c7e452cb-e813-4aeb-99f2-bbfa334ecdc2","Type":"ContainerStarted","Data":"a3f2d977f6297e21926bd46ad72f92f00dc8c437329ce84f0cb21038b84cbc7f"} Dec 05 18:39:24 crc kubenswrapper[4961]: I1205 18:39:24.410144 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-vl9lg/must-gather-24cfw" podStartSLOduration=2.410120808 podStartE2EDuration="2.410120808s" podCreationTimestamp="2025-12-05 18:39:22 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 18:39:24.409147814 +0000 UTC m=+3970.470298297" watchObservedRunningTime="2025-12-05 18:39:24.410120808 +0000 UTC m=+3970.471271301" Dec 05 18:39:28 crc kubenswrapper[4961]: I1205 18:39:28.059092 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-vl9lg/crc-debug-7wfnl"] Dec 05 18:39:28 crc kubenswrapper[4961]: I1205 18:39:28.061148 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-vl9lg/crc-debug-7wfnl" Dec 05 18:39:28 crc kubenswrapper[4961]: I1205 18:39:28.063143 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-vl9lg"/"default-dockercfg-krnqf" Dec 05 18:39:28 crc kubenswrapper[4961]: I1205 18:39:28.142218 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/aa3e5e33-caa5-4e7c-ae03-d77f12789c50-host\") pod \"crc-debug-7wfnl\" (UID: \"aa3e5e33-caa5-4e7c-ae03-d77f12789c50\") " pod="openshift-must-gather-vl9lg/crc-debug-7wfnl" Dec 05 18:39:28 crc kubenswrapper[4961]: I1205 18:39:28.142264 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nxb5z\" (UniqueName: \"kubernetes.io/projected/aa3e5e33-caa5-4e7c-ae03-d77f12789c50-kube-api-access-nxb5z\") pod \"crc-debug-7wfnl\" (UID: \"aa3e5e33-caa5-4e7c-ae03-d77f12789c50\") " pod="openshift-must-gather-vl9lg/crc-debug-7wfnl" Dec 05 18:39:28 crc kubenswrapper[4961]: I1205 18:39:28.243515 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/aa3e5e33-caa5-4e7c-ae03-d77f12789c50-host\") pod \"crc-debug-7wfnl\" (UID: \"aa3e5e33-caa5-4e7c-ae03-d77f12789c50\") " pod="openshift-must-gather-vl9lg/crc-debug-7wfnl" Dec 05 18:39:28 crc kubenswrapper[4961]: I1205 18:39:28.243575 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nxb5z\" (UniqueName: \"kubernetes.io/projected/aa3e5e33-caa5-4e7c-ae03-d77f12789c50-kube-api-access-nxb5z\") pod \"crc-debug-7wfnl\" (UID: \"aa3e5e33-caa5-4e7c-ae03-d77f12789c50\") " pod="openshift-must-gather-vl9lg/crc-debug-7wfnl" Dec 05 18:39:28 crc kubenswrapper[4961]: I1205 18:39:28.243673 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/aa3e5e33-caa5-4e7c-ae03-d77f12789c50-host\") pod \"crc-debug-7wfnl\" (UID: \"aa3e5e33-caa5-4e7c-ae03-d77f12789c50\") " pod="openshift-must-gather-vl9lg/crc-debug-7wfnl" Dec 05 18:39:28 crc kubenswrapper[4961]: I1205 18:39:28.265097 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nxb5z\" (UniqueName: \"kubernetes.io/projected/aa3e5e33-caa5-4e7c-ae03-d77f12789c50-kube-api-access-nxb5z\") pod \"crc-debug-7wfnl\" (UID: \"aa3e5e33-caa5-4e7c-ae03-d77f12789c50\") " pod="openshift-must-gather-vl9lg/crc-debug-7wfnl" Dec 05 18:39:28 crc kubenswrapper[4961]: I1205 18:39:28.379502 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-vl9lg/crc-debug-7wfnl" Dec 05 18:39:28 crc kubenswrapper[4961]: I1205 18:39:28.429854 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vl9lg/crc-debug-7wfnl" event={"ID":"aa3e5e33-caa5-4e7c-ae03-d77f12789c50","Type":"ContainerStarted","Data":"7fdfb0e14da0236acc01578ad35f646fdbe886eaddde9ce9edc9b81b0410fba1"} Dec 05 18:39:29 crc kubenswrapper[4961]: I1205 18:39:29.439257 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vl9lg/crc-debug-7wfnl" event={"ID":"aa3e5e33-caa5-4e7c-ae03-d77f12789c50","Type":"ContainerStarted","Data":"b82863ff3c4580f3e601ebcb8588270111e76323a0d3aef7d32b20891ba8dc56"} Dec 05 18:39:29 crc kubenswrapper[4961]: I1205 18:39:29.453930 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-vl9lg/crc-debug-7wfnl" podStartSLOduration=1.453908431 podStartE2EDuration="1.453908431s" podCreationTimestamp="2025-12-05 18:39:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-12-05 18:39:29.449824301 +0000 UTC m=+3975.510974764" watchObservedRunningTime="2025-12-05 18:39:29.453908431 +0000 UTC m=+3975.515058904" Dec 05 18:40:01 crc kubenswrapper[4961]: I1205 18:40:01.731196 4961 generic.go:334] "Generic (PLEG): container finished" podID="aa3e5e33-caa5-4e7c-ae03-d77f12789c50" containerID="b82863ff3c4580f3e601ebcb8588270111e76323a0d3aef7d32b20891ba8dc56" exitCode=0 Dec 05 18:40:01 crc kubenswrapper[4961]: I1205 18:40:01.731276 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vl9lg/crc-debug-7wfnl" event={"ID":"aa3e5e33-caa5-4e7c-ae03-d77f12789c50","Type":"ContainerDied","Data":"b82863ff3c4580f3e601ebcb8588270111e76323a0d3aef7d32b20891ba8dc56"} Dec 05 18:40:02 crc kubenswrapper[4961]: I1205 18:40:02.867423 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-vl9lg/crc-debug-7wfnl" Dec 05 18:40:02 crc kubenswrapper[4961]: I1205 18:40:02.903206 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-vl9lg/crc-debug-7wfnl"] Dec 05 18:40:02 crc kubenswrapper[4961]: I1205 18:40:02.912589 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-vl9lg/crc-debug-7wfnl"] Dec 05 18:40:03 crc kubenswrapper[4961]: I1205 18:40:03.035704 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/aa3e5e33-caa5-4e7c-ae03-d77f12789c50-host\") pod \"aa3e5e33-caa5-4e7c-ae03-d77f12789c50\" (UID: \"aa3e5e33-caa5-4e7c-ae03-d77f12789c50\") " Dec 05 18:40:03 crc kubenswrapper[4961]: I1205 18:40:03.035843 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/aa3e5e33-caa5-4e7c-ae03-d77f12789c50-host" (OuterVolumeSpecName: "host") pod "aa3e5e33-caa5-4e7c-ae03-d77f12789c50" (UID: "aa3e5e33-caa5-4e7c-ae03-d77f12789c50"). InnerVolumeSpecName "host". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 18:40:03 crc kubenswrapper[4961]: I1205 18:40:03.035936 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nxb5z\" (UniqueName: \"kubernetes.io/projected/aa3e5e33-caa5-4e7c-ae03-d77f12789c50-kube-api-access-nxb5z\") pod \"aa3e5e33-caa5-4e7c-ae03-d77f12789c50\" (UID: \"aa3e5e33-caa5-4e7c-ae03-d77f12789c50\") " Dec 05 18:40:03 crc kubenswrapper[4961]: I1205 18:40:03.036462 4961 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/aa3e5e33-caa5-4e7c-ae03-d77f12789c50-host\") on node \"crc\" DevicePath \"\"" Dec 05 18:40:03 crc kubenswrapper[4961]: I1205 18:40:03.046047 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa3e5e33-caa5-4e7c-ae03-d77f12789c50-kube-api-access-nxb5z" (OuterVolumeSpecName: "kube-api-access-nxb5z") pod "aa3e5e33-caa5-4e7c-ae03-d77f12789c50" (UID: "aa3e5e33-caa5-4e7c-ae03-d77f12789c50"). InnerVolumeSpecName "kube-api-access-nxb5z". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:40:03 crc kubenswrapper[4961]: I1205 18:40:03.138206 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nxb5z\" (UniqueName: \"kubernetes.io/projected/aa3e5e33-caa5-4e7c-ae03-d77f12789c50-kube-api-access-nxb5z\") on node \"crc\" DevicePath \"\"" Dec 05 18:40:03 crc kubenswrapper[4961]: I1205 18:40:03.751942 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7fdfb0e14da0236acc01578ad35f646fdbe886eaddde9ce9edc9b81b0410fba1" Dec 05 18:40:03 crc kubenswrapper[4961]: I1205 18:40:03.752239 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-vl9lg/crc-debug-7wfnl" Dec 05 18:40:04 crc kubenswrapper[4961]: I1205 18:40:04.080372 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-vl9lg/crc-debug-mjhsp"] Dec 05 18:40:04 crc kubenswrapper[4961]: E1205 18:40:04.080915 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa3e5e33-caa5-4e7c-ae03-d77f12789c50" containerName="container-00" Dec 05 18:40:04 crc kubenswrapper[4961]: I1205 18:40:04.080931 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa3e5e33-caa5-4e7c-ae03-d77f12789c50" containerName="container-00" Dec 05 18:40:04 crc kubenswrapper[4961]: I1205 18:40:04.081178 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa3e5e33-caa5-4e7c-ae03-d77f12789c50" containerName="container-00" Dec 05 18:40:04 crc kubenswrapper[4961]: I1205 18:40:04.082056 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-vl9lg/crc-debug-mjhsp" Dec 05 18:40:04 crc kubenswrapper[4961]: I1205 18:40:04.084226 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-vl9lg"/"default-dockercfg-krnqf" Dec 05 18:40:04 crc kubenswrapper[4961]: I1205 18:40:04.258360 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p6g84\" (UniqueName: \"kubernetes.io/projected/dd5e8bf9-ab76-42ee-93dd-58b5889cb557-kube-api-access-p6g84\") pod \"crc-debug-mjhsp\" (UID: \"dd5e8bf9-ab76-42ee-93dd-58b5889cb557\") " pod="openshift-must-gather-vl9lg/crc-debug-mjhsp" Dec 05 18:40:04 crc kubenswrapper[4961]: I1205 18:40:04.258673 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dd5e8bf9-ab76-42ee-93dd-58b5889cb557-host\") pod \"crc-debug-mjhsp\" (UID: \"dd5e8bf9-ab76-42ee-93dd-58b5889cb557\") " pod="openshift-must-gather-vl9lg/crc-debug-mjhsp" Dec 05 18:40:04 crc kubenswrapper[4961]: I1205 18:40:04.360590 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p6g84\" (UniqueName: \"kubernetes.io/projected/dd5e8bf9-ab76-42ee-93dd-58b5889cb557-kube-api-access-p6g84\") pod \"crc-debug-mjhsp\" (UID: \"dd5e8bf9-ab76-42ee-93dd-58b5889cb557\") " pod="openshift-must-gather-vl9lg/crc-debug-mjhsp" Dec 05 18:40:04 crc kubenswrapper[4961]: I1205 18:40:04.360901 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dd5e8bf9-ab76-42ee-93dd-58b5889cb557-host\") pod \"crc-debug-mjhsp\" (UID: \"dd5e8bf9-ab76-42ee-93dd-58b5889cb557\") " pod="openshift-must-gather-vl9lg/crc-debug-mjhsp" Dec 05 18:40:04 crc kubenswrapper[4961]: I1205 18:40:04.361028 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dd5e8bf9-ab76-42ee-93dd-58b5889cb557-host\") pod \"crc-debug-mjhsp\" (UID: \"dd5e8bf9-ab76-42ee-93dd-58b5889cb557\") " pod="openshift-must-gather-vl9lg/crc-debug-mjhsp" Dec 05 18:40:04 crc kubenswrapper[4961]: I1205 18:40:04.383749 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p6g84\" (UniqueName: \"kubernetes.io/projected/dd5e8bf9-ab76-42ee-93dd-58b5889cb557-kube-api-access-p6g84\") pod \"crc-debug-mjhsp\" (UID: \"dd5e8bf9-ab76-42ee-93dd-58b5889cb557\") " pod="openshift-must-gather-vl9lg/crc-debug-mjhsp" Dec 05 18:40:04 crc kubenswrapper[4961]: I1205 18:40:04.399529 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-vl9lg/crc-debug-mjhsp" Dec 05 18:40:04 crc kubenswrapper[4961]: I1205 18:40:04.767357 4961 generic.go:334] "Generic (PLEG): container finished" podID="dd5e8bf9-ab76-42ee-93dd-58b5889cb557" containerID="3adc0870759e73be2c9c7a9ebaef101d5b6bcefcd45789a0171f5ac2f1faa299" exitCode=0 Dec 05 18:40:04 crc kubenswrapper[4961]: I1205 18:40:04.767401 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vl9lg/crc-debug-mjhsp" event={"ID":"dd5e8bf9-ab76-42ee-93dd-58b5889cb557","Type":"ContainerDied","Data":"3adc0870759e73be2c9c7a9ebaef101d5b6bcefcd45789a0171f5ac2f1faa299"} Dec 05 18:40:04 crc kubenswrapper[4961]: I1205 18:40:04.767427 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vl9lg/crc-debug-mjhsp" event={"ID":"dd5e8bf9-ab76-42ee-93dd-58b5889cb557","Type":"ContainerStarted","Data":"db98a46416b484640212e5e781929bc2a19f27f6cb67f566cc6ad786b6d59400"} Dec 05 18:40:04 crc kubenswrapper[4961]: I1205 18:40:04.880567 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa3e5e33-caa5-4e7c-ae03-d77f12789c50" path="/var/lib/kubelet/pods/aa3e5e33-caa5-4e7c-ae03-d77f12789c50/volumes" Dec 05 18:40:05 crc kubenswrapper[4961]: I1205 18:40:05.258891 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-vl9lg/crc-debug-mjhsp"] Dec 05 18:40:05 crc kubenswrapper[4961]: I1205 18:40:05.267769 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-vl9lg/crc-debug-mjhsp"] Dec 05 18:40:05 crc kubenswrapper[4961]: I1205 18:40:05.883333 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-vl9lg/crc-debug-mjhsp" Dec 05 18:40:05 crc kubenswrapper[4961]: I1205 18:40:05.986600 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dd5e8bf9-ab76-42ee-93dd-58b5889cb557-host\") pod \"dd5e8bf9-ab76-42ee-93dd-58b5889cb557\" (UID: \"dd5e8bf9-ab76-42ee-93dd-58b5889cb557\") " Dec 05 18:40:05 crc kubenswrapper[4961]: I1205 18:40:05.986721 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p6g84\" (UniqueName: \"kubernetes.io/projected/dd5e8bf9-ab76-42ee-93dd-58b5889cb557-kube-api-access-p6g84\") pod \"dd5e8bf9-ab76-42ee-93dd-58b5889cb557\" (UID: \"dd5e8bf9-ab76-42ee-93dd-58b5889cb557\") " Dec 05 18:40:05 crc kubenswrapper[4961]: I1205 18:40:05.986734 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/dd5e8bf9-ab76-42ee-93dd-58b5889cb557-host" (OuterVolumeSpecName: "host") pod "dd5e8bf9-ab76-42ee-93dd-58b5889cb557" (UID: "dd5e8bf9-ab76-42ee-93dd-58b5889cb557"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 18:40:05 crc kubenswrapper[4961]: I1205 18:40:05.987286 4961 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/dd5e8bf9-ab76-42ee-93dd-58b5889cb557-host\") on node \"crc\" DevicePath \"\"" Dec 05 18:40:05 crc kubenswrapper[4961]: I1205 18:40:05.992343 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/dd5e8bf9-ab76-42ee-93dd-58b5889cb557-kube-api-access-p6g84" (OuterVolumeSpecName: "kube-api-access-p6g84") pod "dd5e8bf9-ab76-42ee-93dd-58b5889cb557" (UID: "dd5e8bf9-ab76-42ee-93dd-58b5889cb557"). InnerVolumeSpecName "kube-api-access-p6g84". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:40:06 crc kubenswrapper[4961]: I1205 18:40:06.089509 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p6g84\" (UniqueName: \"kubernetes.io/projected/dd5e8bf9-ab76-42ee-93dd-58b5889cb557-kube-api-access-p6g84\") on node \"crc\" DevicePath \"\"" Dec 05 18:40:06 crc kubenswrapper[4961]: I1205 18:40:06.438754 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-vl9lg/crc-debug-gnkbc"] Dec 05 18:40:06 crc kubenswrapper[4961]: E1205 18:40:06.439487 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="dd5e8bf9-ab76-42ee-93dd-58b5889cb557" containerName="container-00" Dec 05 18:40:06 crc kubenswrapper[4961]: I1205 18:40:06.439504 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="dd5e8bf9-ab76-42ee-93dd-58b5889cb557" containerName="container-00" Dec 05 18:40:06 crc kubenswrapper[4961]: I1205 18:40:06.439812 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="dd5e8bf9-ab76-42ee-93dd-58b5889cb557" containerName="container-00" Dec 05 18:40:06 crc kubenswrapper[4961]: I1205 18:40:06.449577 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-vl9lg/crc-debug-gnkbc" Dec 05 18:40:06 crc kubenswrapper[4961]: I1205 18:40:06.622923 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c28lz\" (UniqueName: \"kubernetes.io/projected/8e89c27a-ad06-499b-875d-d9cea0cf4ad1-kube-api-access-c28lz\") pod \"crc-debug-gnkbc\" (UID: \"8e89c27a-ad06-499b-875d-d9cea0cf4ad1\") " pod="openshift-must-gather-vl9lg/crc-debug-gnkbc" Dec 05 18:40:06 crc kubenswrapper[4961]: I1205 18:40:06.624642 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8e89c27a-ad06-499b-875d-d9cea0cf4ad1-host\") pod \"crc-debug-gnkbc\" (UID: \"8e89c27a-ad06-499b-875d-d9cea0cf4ad1\") " pod="openshift-must-gather-vl9lg/crc-debug-gnkbc" Dec 05 18:40:06 crc kubenswrapper[4961]: I1205 18:40:06.725965 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c28lz\" (UniqueName: \"kubernetes.io/projected/8e89c27a-ad06-499b-875d-d9cea0cf4ad1-kube-api-access-c28lz\") pod \"crc-debug-gnkbc\" (UID: \"8e89c27a-ad06-499b-875d-d9cea0cf4ad1\") " pod="openshift-must-gather-vl9lg/crc-debug-gnkbc" Dec 05 18:40:06 crc kubenswrapper[4961]: I1205 18:40:06.726079 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8e89c27a-ad06-499b-875d-d9cea0cf4ad1-host\") pod \"crc-debug-gnkbc\" (UID: \"8e89c27a-ad06-499b-875d-d9cea0cf4ad1\") " pod="openshift-must-gather-vl9lg/crc-debug-gnkbc" Dec 05 18:40:06 crc kubenswrapper[4961]: I1205 18:40:06.726214 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8e89c27a-ad06-499b-875d-d9cea0cf4ad1-host\") pod \"crc-debug-gnkbc\" (UID: \"8e89c27a-ad06-499b-875d-d9cea0cf4ad1\") " pod="openshift-must-gather-vl9lg/crc-debug-gnkbc" Dec 05 18:40:06 crc kubenswrapper[4961]: I1205 18:40:06.744906 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c28lz\" (UniqueName: \"kubernetes.io/projected/8e89c27a-ad06-499b-875d-d9cea0cf4ad1-kube-api-access-c28lz\") pod \"crc-debug-gnkbc\" (UID: \"8e89c27a-ad06-499b-875d-d9cea0cf4ad1\") " 
pod="openshift-must-gather-vl9lg/crc-debug-gnkbc" Dec 05 18:40:06 crc kubenswrapper[4961]: I1205 18:40:06.767608 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-vl9lg/crc-debug-gnkbc" Dec 05 18:40:06 crc kubenswrapper[4961]: I1205 18:40:06.792678 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="db98a46416b484640212e5e781929bc2a19f27f6cb67f566cc6ad786b6d59400" Dec 05 18:40:06 crc kubenswrapper[4961]: I1205 18:40:06.792717 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-vl9lg/crc-debug-mjhsp" Dec 05 18:40:06 crc kubenswrapper[4961]: W1205 18:40:06.796641 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8e89c27a_ad06_499b_875d_d9cea0cf4ad1.slice/crio-c3f2776b527fd0cdee669094c1988c99b978cd0021df4b6d1f7d2f0ca086a179 WatchSource:0}: Error finding container c3f2776b527fd0cdee669094c1988c99b978cd0021df4b6d1f7d2f0ca086a179: Status 404 returned error can't find the container with id c3f2776b527fd0cdee669094c1988c99b978cd0021df4b6d1f7d2f0ca086a179 Dec 05 18:40:06 crc kubenswrapper[4961]: I1205 18:40:06.875272 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="dd5e8bf9-ab76-42ee-93dd-58b5889cb557" path="/var/lib/kubelet/pods/dd5e8bf9-ab76-42ee-93dd-58b5889cb557/volumes" Dec 05 18:40:07 crc kubenswrapper[4961]: I1205 18:40:07.806830 4961 generic.go:334] "Generic (PLEG): container finished" podID="8e89c27a-ad06-499b-875d-d9cea0cf4ad1" containerID="9cfb79810dbb0ceec84c3d8c721e6655d302c3e66cfc302dacc79e7cac1c231d" exitCode=0 Dec 05 18:40:07 crc kubenswrapper[4961]: I1205 18:40:07.806936 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vl9lg/crc-debug-gnkbc" event={"ID":"8e89c27a-ad06-499b-875d-d9cea0cf4ad1","Type":"ContainerDied","Data":"9cfb79810dbb0ceec84c3d8c721e6655d302c3e66cfc302dacc79e7cac1c231d"} Dec 05 18:40:07 crc kubenswrapper[4961]: I1205 18:40:07.807163 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vl9lg/crc-debug-gnkbc" event={"ID":"8e89c27a-ad06-499b-875d-d9cea0cf4ad1","Type":"ContainerStarted","Data":"c3f2776b527fd0cdee669094c1988c99b978cd0021df4b6d1f7d2f0ca086a179"} Dec 05 18:40:07 crc kubenswrapper[4961]: I1205 18:40:07.847988 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-vl9lg/crc-debug-gnkbc"] Dec 05 18:40:07 crc kubenswrapper[4961]: I1205 18:40:07.856042 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-vl9lg/crc-debug-gnkbc"] Dec 05 18:40:08 crc kubenswrapper[4961]: I1205 18:40:08.948367 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-vl9lg/crc-debug-gnkbc" Dec 05 18:40:08 crc kubenswrapper[4961]: I1205 18:40:08.986615 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c28lz\" (UniqueName: \"kubernetes.io/projected/8e89c27a-ad06-499b-875d-d9cea0cf4ad1-kube-api-access-c28lz\") pod \"8e89c27a-ad06-499b-875d-d9cea0cf4ad1\" (UID: \"8e89c27a-ad06-499b-875d-d9cea0cf4ad1\") " Dec 05 18:40:08 crc kubenswrapper[4961]: I1205 18:40:08.986768 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8e89c27a-ad06-499b-875d-d9cea0cf4ad1-host\") pod \"8e89c27a-ad06-499b-875d-d9cea0cf4ad1\" (UID: \"8e89c27a-ad06-499b-875d-d9cea0cf4ad1\") " Dec 05 18:40:08 crc kubenswrapper[4961]: I1205 18:40:08.987078 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8e89c27a-ad06-499b-875d-d9cea0cf4ad1-host" (OuterVolumeSpecName: "host") pod "8e89c27a-ad06-499b-875d-d9cea0cf4ad1" (UID: "8e89c27a-ad06-499b-875d-d9cea0cf4ad1"). InnerVolumeSpecName "host". PluginName "kubernetes.io/host-path", VolumeGidValue "" Dec 05 18:40:08 crc kubenswrapper[4961]: I1205 18:40:08.994933 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8e89c27a-ad06-499b-875d-d9cea0cf4ad1-kube-api-access-c28lz" (OuterVolumeSpecName: "kube-api-access-c28lz") pod "8e89c27a-ad06-499b-875d-d9cea0cf4ad1" (UID: "8e89c27a-ad06-499b-875d-d9cea0cf4ad1"). InnerVolumeSpecName "kube-api-access-c28lz". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:40:09 crc kubenswrapper[4961]: I1205 18:40:09.089656 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c28lz\" (UniqueName: \"kubernetes.io/projected/8e89c27a-ad06-499b-875d-d9cea0cf4ad1-kube-api-access-c28lz\") on node \"crc\" DevicePath \"\"" Dec 05 18:40:09 crc kubenswrapper[4961]: I1205 18:40:09.089706 4961 reconciler_common.go:293] "Volume detached for volume \"host\" (UniqueName: \"kubernetes.io/host-path/8e89c27a-ad06-499b-875d-d9cea0cf4ad1-host\") on node \"crc\" DevicePath \"\"" Dec 05 18:40:09 crc kubenswrapper[4961]: I1205 18:40:09.841469 4961 scope.go:117] "RemoveContainer" containerID="9cfb79810dbb0ceec84c3d8c721e6655d302c3e66cfc302dacc79e7cac1c231d" Dec 05 18:40:09 crc kubenswrapper[4961]: I1205 18:40:09.841900 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-vl9lg/crc-debug-gnkbc" Dec 05 18:40:10 crc kubenswrapper[4961]: I1205 18:40:10.875186 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8e89c27a-ad06-499b-875d-d9cea0cf4ad1" path="/var/lib/kubelet/pods/8e89c27a-ad06-499b-875d-d9cea0cf4ad1/volumes" Dec 05 18:40:35 crc kubenswrapper[4961]: I1205 18:40:35.408194 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5d86bc7b8-5vlfw_c8daeb2b-9caa-4a76-b22a-e3320f0235a0/barbican-api/0.log" Dec 05 18:40:35 crc kubenswrapper[4961]: I1205 18:40:35.617325 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-api-5d86bc7b8-5vlfw_c8daeb2b-9caa-4a76-b22a-e3320f0235a0/barbican-api-log/0.log" Dec 05 18:40:35 crc kubenswrapper[4961]: I1205 18:40:35.680183 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7c7dc5bc58-54zmv_9db816ec-0b58-40b0-a063-974df541802b/barbican-keystone-listener/0.log" Dec 05 18:40:35 crc kubenswrapper[4961]: I1205 18:40:35.764514 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-keystone-listener-7c7dc5bc58-54zmv_9db816ec-0b58-40b0-a063-974df541802b/barbican-keystone-listener-log/0.log" Dec 05 18:40:35 crc kubenswrapper[4961]: I1205 18:40:35.864226 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-56bf9fd9dc-6zsc7_1b291fd5-a251-45db-8c2b-334f43909f1f/barbican-worker/0.log" Dec 05 18:40:35 crc kubenswrapper[4961]: I1205 18:40:35.869203 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_barbican-worker-56bf9fd9dc-6zsc7_1b291fd5-a251-45db-8c2b-334f43909f1f/barbican-worker-log/0.log" Dec 05 18:40:36 crc kubenswrapper[4961]: I1205 18:40:36.063205 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_bootstrap-edpm-deployment-openstack-edpm-ipam-84hml_b9050523-5c05-47cd-9e51-85703488427f/bootstrap-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:40:36 crc kubenswrapper[4961]: I1205 18:40:36.159487 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_a61be49f-b67b-4cd9-8790-12fe7dfde50b/ceilometer-central-agent/0.log" Dec 05 18:40:36 crc kubenswrapper[4961]: I1205 18:40:36.204356 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_a61be49f-b67b-4cd9-8790-12fe7dfde50b/ceilometer-notification-agent/0.log" Dec 05 18:40:36 crc kubenswrapper[4961]: I1205 18:40:36.274341 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_a61be49f-b67b-4cd9-8790-12fe7dfde50b/proxy-httpd/0.log" Dec 05 18:40:36 crc kubenswrapper[4961]: I1205 18:40:36.299473 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ceilometer-0_a61be49f-b67b-4cd9-8790-12fe7dfde50b/sg-core/0.log" Dec 05 18:40:36 crc kubenswrapper[4961]: I1205 18:40:36.458419 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_7aa62ef2-e824-4f99-98bc-d4049b51ab7e/cinder-api/0.log" Dec 05 18:40:36 crc kubenswrapper[4961]: I1205 18:40:36.509661 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-api-0_7aa62ef2-e824-4f99-98bc-d4049b51ab7e/cinder-api-log/0.log" Dec 05 18:40:36 crc kubenswrapper[4961]: I1205 18:40:36.668745 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_9188090c-6109-45b2-b63c-1656ebb2ad0e/cinder-scheduler/0.log" Dec 05 18:40:36 crc kubenswrapper[4961]: I1205 
18:40:36.742646 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_cinder-scheduler-0_9188090c-6109-45b2-b63c-1656ebb2ad0e/probe/0.log" Dec 05 18:40:36 crc kubenswrapper[4961]: I1205 18:40:36.752340 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-network-edpm-deployment-openstack-edpm-ipam-8vfn2_8228f2d3-22c7-4bfd-bd4d-b17ed9ba9ffa/configure-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:40:36 crc kubenswrapper[4961]: I1205 18:40:36.890218 4961 pod_container_manager_linux.go:210] "Failed to delete cgroup paths" cgroupName=["kubepods","besteffort","poddd5e8bf9-ab76-42ee-93dd-58b5889cb557"] err="unable to destroy cgroup paths for cgroup [kubepods besteffort poddd5e8bf9-ab76-42ee-93dd-58b5889cb557] : Timed out while waiting for systemd to remove kubepods-besteffort-poddd5e8bf9_ab76_42ee_93dd_58b5889cb557.slice" Dec 05 18:40:36 crc kubenswrapper[4961]: E1205 18:40:36.890521 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to delete cgroup paths for [kubepods besteffort poddd5e8bf9-ab76-42ee-93dd-58b5889cb557] : unable to destroy cgroup paths for cgroup [kubepods besteffort poddd5e8bf9-ab76-42ee-93dd-58b5889cb557] : Timed out while waiting for systemd to remove kubepods-besteffort-poddd5e8bf9_ab76_42ee_93dd_58b5889cb557.slice" pod="openshift-must-gather-vl9lg/crc-debug-mjhsp" podUID="dd5e8bf9-ab76-42ee-93dd-58b5889cb557" Dec 05 18:40:36 crc kubenswrapper[4961]: I1205 18:40:36.986631 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_configure-os-edpm-deployment-openstack-edpm-ipam-592hz_877c43bb-852d-4f38-8322-8c72200ca936/configure-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:40:37 crc kubenswrapper[4961]: I1205 18:40:37.013147 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-8c6f6df99-rw7cz_1270f427-e53f-410f-b9ae-9cf12c5dffe1/init/0.log" Dec 05 18:40:37 crc kubenswrapper[4961]: I1205 18:40:37.099188 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-vl9lg/crc-debug-mjhsp" Dec 05 18:40:37 crc kubenswrapper[4961]: I1205 18:40:37.135969 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-8c6f6df99-rw7cz_1270f427-e53f-410f-b9ae-9cf12c5dffe1/init/0.log" Dec 05 18:40:37 crc kubenswrapper[4961]: I1205 18:40:37.192010 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_dnsmasq-dns-8c6f6df99-rw7cz_1270f427-e53f-410f-b9ae-9cf12c5dffe1/dnsmasq-dns/0.log" Dec 05 18:40:37 crc kubenswrapper[4961]: I1205 18:40:37.233714 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_download-cache-edpm-deployment-openstack-edpm-ipam-7trn8_c2f474d4-f96a-45cd-9432-b90f703a6b81/download-cache-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:40:37 crc kubenswrapper[4961]: I1205 18:40:37.395151 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96/glance-httpd/0.log" Dec 05 18:40:37 crc kubenswrapper[4961]: I1205 18:40:37.464925 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-external-api-0_e2f4f3fb-74f5-4ab8-9e2f-9acc5868bb96/glance-log/0.log" Dec 05 18:40:37 crc kubenswrapper[4961]: I1205 18:40:37.594213 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_3ac5ebd0-a089-428c-a698-cbd1f6c50c57/glance-httpd/0.log" Dec 05 18:40:37 crc kubenswrapper[4961]: I1205 18:40:37.611887 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_glance-default-internal-api-0_3ac5ebd0-a089-428c-a698-cbd1f6c50c57/glance-log/0.log" Dec 05 18:40:37 crc kubenswrapper[4961]: I1205 18:40:37.725768 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-78bb69647d-95ptt_a3dcddde-25f9-446a-8d5f-d9468cfa6940/horizon/0.log" Dec 05 18:40:37 crc kubenswrapper[4961]: I1205 18:40:37.919896 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-certs-edpm-deployment-openstack-edpm-ipam-925jr_b8b70414-5c28-428e-90be-4e5d82070919/install-certs-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:40:38 crc kubenswrapper[4961]: I1205 18:40:38.163798 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_install-os-edpm-deployment-openstack-edpm-ipam-7lbk7_0e516b37-5d75-47c4-af9c-438a41abf158/install-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:40:38 crc kubenswrapper[4961]: I1205 18:40:38.196278 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_horizon-78bb69647d-95ptt_a3dcddde-25f9-446a-8d5f-d9468cfa6940/horizon-log/0.log" Dec 05 18:40:38 crc kubenswrapper[4961]: I1205 18:40:38.381274 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-7896fbd4bd-l9rg6_3e200718-485e-49b6-b4ab-8311a9178f66/keystone-api/0.log" Dec 05 18:40:38 crc kubenswrapper[4961]: I1205 18:40:38.395124 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_keystone-cron-29415961-66rdg_1f248e91-833b-4136-ad16-f32f9aff9513/keystone-cron/0.log" Dec 05 18:40:38 crc kubenswrapper[4961]: I1205 18:40:38.530569 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_kube-state-metrics-0_e5e6ba28-82ee-411a-a9e1-46db404bdff6/kube-state-metrics/0.log" Dec 05 18:40:38 crc kubenswrapper[4961]: I1205 18:40:38.601029 4961 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_libvirt-edpm-deployment-openstack-edpm-ipam-8m5lb_350a4c11-1d87-4f63-8ec8-c808de6e46b0/libvirt-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:40:38 crc kubenswrapper[4961]: I1205 18:40:38.981510 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-88c564b55-ktjt8_84b8d919-505e-44ba-b19a-532ec4df3533/neutron-httpd/0.log" Dec 05 18:40:39 crc kubenswrapper[4961]: I1205 18:40:39.044172 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-88c564b55-ktjt8_84b8d919-505e-44ba-b19a-532ec4df3533/neutron-api/0.log" Dec 05 18:40:39 crc kubenswrapper[4961]: I1205 18:40:39.185810 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_neutron-metadata-edpm-deployment-openstack-edpm-ipam-vrzbk_baa4c345-2f59-42ac-a33e-c350c642a73c/neutron-metadata-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:40:39 crc kubenswrapper[4961]: I1205 18:40:39.701615 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_258d7583-7072-4621-8490-c0bfcc91abff/nova-api-log/0.log" Dec 05 18:40:39 crc kubenswrapper[4961]: I1205 18:40:39.793526 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell0-conductor-0_b8e29a1c-419b-4ab0-84ca-b87652bf1812/nova-cell0-conductor-conductor/0.log" Dec 05 18:40:40 crc kubenswrapper[4961]: I1205 18:40:40.086786 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-conductor-0_1e14fb01-680d-425f-a35b-c6346f47b86d/nova-cell1-conductor-conductor/0.log" Dec 05 18:40:40 crc kubenswrapper[4961]: I1205 18:40:40.134828 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-api-0_258d7583-7072-4621-8490-c0bfcc91abff/nova-api-api/0.log" Dec 05 18:40:40 crc kubenswrapper[4961]: I1205 18:40:40.172154 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-cell1-novncproxy-0_2de9d115-6198-4316-a304-1e4eca7cdd98/nova-cell1-novncproxy-novncproxy/0.log" Dec 05 18:40:40 crc kubenswrapper[4961]: I1205 18:40:40.328154 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-edpm-deployment-openstack-edpm-ipam-crdt6_19f9b2f9-7ecf-4676-bd98-c3c4615d12c9/nova-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:40:40 crc kubenswrapper[4961]: I1205 18:40:40.479399 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_56cc0cd5-a044-49c6-946c-82e56b2c4d57/nova-metadata-log/0.log" Dec 05 18:40:40 crc kubenswrapper[4961]: I1205 18:40:40.802800 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_786a3535-1c16-4389-9239-49f6d349c3af/mysql-bootstrap/0.log" Dec 05 18:40:40 crc kubenswrapper[4961]: I1205 18:40:40.811158 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-scheduler-0_839ae7fd-5c5c-4767-b0d8-c7f24f17b03b/nova-scheduler-scheduler/0.log" Dec 05 18:40:41 crc kubenswrapper[4961]: I1205 18:40:41.010126 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_786a3535-1c16-4389-9239-49f6d349c3af/mysql-bootstrap/0.log" Dec 05 18:40:41 crc kubenswrapper[4961]: I1205 18:40:41.020533 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-cell1-galera-0_786a3535-1c16-4389-9239-49f6d349c3af/galera/0.log" Dec 05 18:40:41 crc kubenswrapper[4961]: I1205 18:40:41.183124 4961 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_openstack-galera-0_78297e26-2e01-4bb1-8f35-c96861dfda09/mysql-bootstrap/0.log" Dec 05 18:40:41 crc kubenswrapper[4961]: I1205 18:40:41.420872 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_78297e26-2e01-4bb1-8f35-c96861dfda09/galera/0.log" Dec 05 18:40:41 crc kubenswrapper[4961]: I1205 18:40:41.431231 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstack-galera-0_78297e26-2e01-4bb1-8f35-c96861dfda09/mysql-bootstrap/0.log" Dec 05 18:40:41 crc kubenswrapper[4961]: I1205 18:40:41.606104 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_openstackclient_ce8ed9d8-89a0-4d15-9f08-d30111d16a2e/openstackclient/0.log" Dec 05 18:40:41 crc kubenswrapper[4961]: I1205 18:40:41.700497 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ldph7_6b6b1e99-e081-4c93-8fe8-c693eb7a0205/ovn-controller/0.log" Dec 05 18:40:41 crc kubenswrapper[4961]: I1205 18:40:41.728109 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_nova-metadata-0_56cc0cd5-a044-49c6-946c-82e56b2c4d57/nova-metadata-metadata/0.log" Dec 05 18:40:42 crc kubenswrapper[4961]: I1205 18:40:42.197709 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-metrics-bkcrz_3854940d-06c0-4afd-a62f-eeeff97e5b7f/openstack-network-exporter/0.log" Dec 05 18:40:42 crc kubenswrapper[4961]: I1205 18:40:42.202833 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-c9ff2_db7a99f8-4e0e-408b-9b96-39340c35d4d8/ovsdb-server-init/0.log" Dec 05 18:40:42 crc kubenswrapper[4961]: I1205 18:40:42.402764 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-c9ff2_db7a99f8-4e0e-408b-9b96-39340c35d4d8/ovsdb-server-init/0.log" Dec 05 18:40:42 crc kubenswrapper[4961]: I1205 18:40:42.468898 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-c9ff2_db7a99f8-4e0e-408b-9b96-39340c35d4d8/ovs-vswitchd/0.log" Dec 05 18:40:42 crc kubenswrapper[4961]: I1205 18:40:42.474947 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-controller-ovs-c9ff2_db7a99f8-4e0e-408b-9b96-39340c35d4d8/ovsdb-server/0.log" Dec 05 18:40:42 crc kubenswrapper[4961]: I1205 18:40:42.650373 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_419fa856-384a-4fd1-95e7-7810e12b1307/openstack-network-exporter/0.log" Dec 05 18:40:42 crc kubenswrapper[4961]: I1205 18:40:42.668247 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-edpm-deployment-openstack-edpm-ipam-nrkgn_2428c0ac-375e-4515-9dfd-39f68c6e6ed1/ovn-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:40:42 crc kubenswrapper[4961]: I1205 18:40:42.723657 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovn-northd-0_419fa856-384a-4fd1-95e7-7810e12b1307/ovn-northd/0.log" Dec 05 18:40:42 crc kubenswrapper[4961]: I1205 18:40:42.876381 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_38c3904c-e6f1-4d83-bfbc-d5c39f52d67d/openstack-network-exporter/0.log" Dec 05 18:40:42 crc kubenswrapper[4961]: I1205 18:40:42.937655 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-nb-0_38c3904c-e6f1-4d83-bfbc-d5c39f52d67d/ovsdbserver-nb/0.log" Dec 05 18:40:43 crc kubenswrapper[4961]: I1205 18:40:43.111768 4961 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_ovsdbserver-sb-0_1603a1ba-53e0-4707-a222-392195709f98/openstack-network-exporter/0.log" Dec 05 18:40:43 crc kubenswrapper[4961]: I1205 18:40:43.131740 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ovsdbserver-sb-0_1603a1ba-53e0-4707-a222-392195709f98/ovsdbserver-sb/0.log" Dec 05 18:40:43 crc kubenswrapper[4961]: I1205 18:40:43.316738 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-846ff7879b-wj44p_9e625db8-8bfa-4a00-957c-8a31f781da4f/placement-api/0.log" Dec 05 18:40:43 crc kubenswrapper[4961]: I1205 18:40:43.423466 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_placement-846ff7879b-wj44p_9e625db8-8bfa-4a00-957c-8a31f781da4f/placement-log/0.log" Dec 05 18:40:43 crc kubenswrapper[4961]: I1205 18:40:43.603705 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_2deb3a6e-b9b0-4e6d-a755-286adb0a3975/setup-container/0.log" Dec 05 18:40:43 crc kubenswrapper[4961]: I1205 18:40:43.956009 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_2deb3a6e-b9b0-4e6d-a755-286adb0a3975/setup-container/0.log" Dec 05 18:40:44 crc kubenswrapper[4961]: I1205 18:40:44.019647 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-cell1-server-0_2deb3a6e-b9b0-4e6d-a755-286adb0a3975/rabbitmq/0.log" Dec 05 18:40:44 crc kubenswrapper[4961]: I1205 18:40:44.033692 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c21abdd4-f06b-4865-8880-0603525e1cb1/setup-container/0.log" Dec 05 18:40:44 crc kubenswrapper[4961]: I1205 18:40:44.225873 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c21abdd4-f06b-4865-8880-0603525e1cb1/rabbitmq/0.log" Dec 05 18:40:44 crc kubenswrapper[4961]: I1205 18:40:44.232335 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_rabbitmq-server-0_c21abdd4-f06b-4865-8880-0603525e1cb1/setup-container/0.log" Dec 05 18:40:44 crc kubenswrapper[4961]: I1205 18:40:44.328632 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_reboot-os-edpm-deployment-openstack-edpm-ipam-tw5j8_f2a419b8-744d-4932-a845-d9376364834b/reboot-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:40:44 crc kubenswrapper[4961]: I1205 18:40:44.488994 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_redhat-edpm-deployment-openstack-edpm-ipam-5wwq4_c829fd2b-07d9-4aaf-b50d-1db5cba1ccc3/redhat-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:40:44 crc kubenswrapper[4961]: I1205 18:40:44.556946 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_repo-setup-edpm-deployment-openstack-edpm-ipam-m8xj8_1c42b2d0-4525-4847-a505-a625b88765b9/repo-setup-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:40:44 crc kubenswrapper[4961]: I1205 18:40:44.773041 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_ssh-known-hosts-edpm-deployment-qgg4h_fdec8a7a-22cd-486f-81e5-9a8a7931b9bc/ssh-known-hosts-edpm-deployment/0.log" Dec 05 18:40:44 crc kubenswrapper[4961]: I1205 18:40:44.775742 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_run-os-edpm-deployment-openstack-edpm-ipam-mdm44_53528787-f94b-4255-bb5b-57b8c583eaaf/run-os-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:40:45 crc kubenswrapper[4961]: I1205 18:40:45.062805 4961 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack_swift-proxy-5d77d54f6c-4pccf_dcca830e-9231-4c67-b5fa-669102d7ecc8/proxy-server/0.log" Dec 05 18:40:45 crc kubenswrapper[4961]: I1205 18:40:45.211575 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-ring-rebalance-2mrxc_2b26a944-dad9-45ea-b636-5f2ddaadc80d/swift-ring-rebalance/0.log" Dec 05 18:40:45 crc kubenswrapper[4961]: I1205 18:40:45.217867 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-proxy-5d77d54f6c-4pccf_dcca830e-9231-4c67-b5fa-669102d7ecc8/proxy-httpd/0.log" Dec 05 18:40:45 crc kubenswrapper[4961]: I1205 18:40:45.236607 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/account-auditor/0.log" Dec 05 18:40:45 crc kubenswrapper[4961]: I1205 18:40:45.405722 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/account-replicator/0.log" Dec 05 18:40:45 crc kubenswrapper[4961]: I1205 18:40:45.478538 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/account-reaper/0.log" Dec 05 18:40:45 crc kubenswrapper[4961]: I1205 18:40:45.487259 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/account-server/0.log" Dec 05 18:40:45 crc kubenswrapper[4961]: I1205 18:40:45.609600 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/container-auditor/0.log" Dec 05 18:40:45 crc kubenswrapper[4961]: I1205 18:40:45.638243 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/container-replicator/0.log" Dec 05 18:40:45 crc kubenswrapper[4961]: I1205 18:40:45.677899 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/container-server/0.log" Dec 05 18:40:45 crc kubenswrapper[4961]: I1205 18:40:45.714687 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/container-updater/0.log" Dec 05 18:40:45 crc kubenswrapper[4961]: I1205 18:40:45.811469 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/object-auditor/0.log" Dec 05 18:40:45 crc kubenswrapper[4961]: I1205 18:40:45.873418 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/object-expirer/0.log" Dec 05 18:40:45 crc kubenswrapper[4961]: I1205 18:40:45.901867 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/object-server/0.log" Dec 05 18:40:45 crc kubenswrapper[4961]: I1205 18:40:45.934263 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/object-replicator/0.log" Dec 05 18:40:45 crc kubenswrapper[4961]: I1205 18:40:45.994664 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/object-updater/0.log" Dec 05 18:40:46 crc kubenswrapper[4961]: I1205 18:40:46.076414 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/rsync/0.log" Dec 05 
18:40:46 crc kubenswrapper[4961]: I1205 18:40:46.106651 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_swift-storage-0_e533098a-ca28-487e-8471-7a426defda37/swift-recon-cron/0.log" Dec 05 18:40:46 crc kubenswrapper[4961]: I1205 18:40:46.262128 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_telemetry-edpm-deployment-openstack-edpm-ipam-7gfxq_96c8a8f6-8c0a-4c00-b80e-719556036c4e/telemetry-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:40:46 crc kubenswrapper[4961]: I1205 18:40:46.350575 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_tempest-tests-tempest_61480a22-be11-4fc5-83fa-8736dec80842/tempest-tests-tempest-tests-runner/0.log" Dec 05 18:40:46 crc kubenswrapper[4961]: I1205 18:40:46.462567 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_test-operator-logs-pod-tempest-tempest-tests-tempest_8be5fc0b-4a17-4763-a30c-3d2053d33f29/test-operator-logs-container/0.log" Dec 05 18:40:46 crc kubenswrapper[4961]: I1205 18:40:46.598949 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_validate-network-edpm-deployment-openstack-edpm-ipam-wp5zn_bed15574-a82b-4a31-baa8-8ddfc4a93972/validate-network-edpm-deployment-openstack-edpm-ipam/0.log" Dec 05 18:40:57 crc kubenswrapper[4961]: I1205 18:40:57.114246 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack_memcached-0_4c503456-1649-444c-a321-687b4294d2fa/memcached/0.log" Dec 05 18:41:13 crc kubenswrapper[4961]: I1205 18:41:13.023539 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq_39126237-6470-4ff9-9dea-d2a7c88a2540/util/0.log" Dec 05 18:41:13 crc kubenswrapper[4961]: I1205 18:41:13.215014 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq_39126237-6470-4ff9-9dea-d2a7c88a2540/util/0.log" Dec 05 18:41:13 crc kubenswrapper[4961]: I1205 18:41:13.228936 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq_39126237-6470-4ff9-9dea-d2a7c88a2540/pull/0.log" Dec 05 18:41:13 crc kubenswrapper[4961]: I1205 18:41:13.262705 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq_39126237-6470-4ff9-9dea-d2a7c88a2540/pull/0.log" Dec 05 18:41:13 crc kubenswrapper[4961]: I1205 18:41:13.385237 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq_39126237-6470-4ff9-9dea-d2a7c88a2540/util/0.log" Dec 05 18:41:13 crc kubenswrapper[4961]: I1205 18:41:13.403159 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq_39126237-6470-4ff9-9dea-d2a7c88a2540/pull/0.log" Dec 05 18:41:13 crc kubenswrapper[4961]: I1205 18:41:13.427236 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_2449f7ff0c8d9d61511a24469bf3b6f4ace4e9ae38cc0a2c1f3b237889rknkq_39126237-6470-4ff9-9dea-d2a7c88a2540/extract/0.log" Dec 05 18:41:13 crc kubenswrapper[4961]: I1205 18:41:13.575341 4961 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-cnqj9_705eb884-eb46-4d59-86ee-c2f1587d5df4/kube-rbac-proxy/0.log" Dec 05 18:41:13 crc kubenswrapper[4961]: I1205 18:41:13.608725 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_barbican-operator-controller-manager-7d9dfd778-cnqj9_705eb884-eb46-4d59-86ee-c2f1587d5df4/manager/0.log" Dec 05 18:41:13 crc kubenswrapper[4961]: I1205 18:41:13.644999 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-nw6x8_406dca34-428b-493b-b564-511542c2bad6/kube-rbac-proxy/0.log" Dec 05 18:41:13 crc kubenswrapper[4961]: I1205 18:41:13.788863 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_cinder-operator-controller-manager-859b6ccc6-nw6x8_406dca34-428b-493b-b564-511542c2bad6/manager/0.log" Dec 05 18:41:13 crc kubenswrapper[4961]: I1205 18:41:13.794234 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-qnmsr_6a4cdfbf-8697-4f8f-9d07-b5aaa5e05991/kube-rbac-proxy/0.log" Dec 05 18:41:13 crc kubenswrapper[4961]: I1205 18:41:13.818819 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_designate-operator-controller-manager-78b4bc895b-qnmsr_6a4cdfbf-8697-4f8f-9d07-b5aaa5e05991/manager/0.log" Dec 05 18:41:13 crc kubenswrapper[4961]: I1205 18:41:13.966938 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-dlwzz_0b1fd140-fbf5-4d64-950b-b0bdcd07ec54/kube-rbac-proxy/0.log" Dec 05 18:41:14 crc kubenswrapper[4961]: I1205 18:41:14.074824 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_glance-operator-controller-manager-77987cd8cd-dlwzz_0b1fd140-fbf5-4d64-950b-b0bdcd07ec54/manager/0.log" Dec 05 18:41:14 crc kubenswrapper[4961]: I1205 18:41:14.138565 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-95v4g_4d42ce43-3c27-4007-a20b-e0068beb2490/kube-rbac-proxy/0.log" Dec 05 18:41:14 crc kubenswrapper[4961]: I1205 18:41:14.168719 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_heat-operator-controller-manager-5f64f6f8bb-95v4g_4d42ce43-3c27-4007-a20b-e0068beb2490/manager/0.log" Dec 05 18:41:14 crc kubenswrapper[4961]: I1205 18:41:14.244630 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-gd5nk_af9a8d55-8cff-40e2-9f1b-bbd05c3eea80/kube-rbac-proxy/0.log" Dec 05 18:41:14 crc kubenswrapper[4961]: I1205 18:41:14.319065 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_horizon-operator-controller-manager-68c6d99b8f-gd5nk_af9a8d55-8cff-40e2-9f1b-bbd05c3eea80/manager/0.log" Dec 05 18:41:14 crc kubenswrapper[4961]: I1205 18:41:14.380474 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-84b9cfc694-lml7r_77f7ec48-3abf-4934-a703-fa3f5edfbd27/kube-rbac-proxy/0.log" Dec 05 18:41:14 crc kubenswrapper[4961]: I1205 18:41:14.626535 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-zjbvv_2bb86b5f-1ee1-48c0-bcc1-60ca583c1339/kube-rbac-proxy/0.log" Dec 05 18:41:14 crc kubenswrapper[4961]: I1205 18:41:14.651554 4961 log.go:25] 
"Finished parsing log file" path="/var/log/pods/openstack-operators_ironic-operator-controller-manager-6c548fd776-zjbvv_2bb86b5f-1ee1-48c0-bcc1-60ca583c1339/manager/0.log" Dec 05 18:41:14 crc kubenswrapper[4961]: I1205 18:41:14.688372 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_infra-operator-controller-manager-84b9cfc694-lml7r_77f7ec48-3abf-4934-a703-fa3f5edfbd27/manager/0.log" Dec 05 18:41:14 crc kubenswrapper[4961]: I1205 18:41:14.796757 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-bnjfr_0499fdb9-20d5-445c-9ca0-4492287fbcc0/kube-rbac-proxy/0.log" Dec 05 18:41:14 crc kubenswrapper[4961]: I1205 18:41:14.850641 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_keystone-operator-controller-manager-7765d96ddf-bnjfr_0499fdb9-20d5-445c-9ca0-4492287fbcc0/manager/0.log" Dec 05 18:41:15 crc kubenswrapper[4961]: I1205 18:41:15.000557 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-q2p4h_90df3fec-9bc9-48ca-a432-374c1f7e2002/kube-rbac-proxy/0.log" Dec 05 18:41:15 crc kubenswrapper[4961]: I1205 18:41:15.075063 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-4whjc_f7757573-1085-4560-880c-3d9b36ce93f7/kube-rbac-proxy/0.log" Dec 05 18:41:15 crc kubenswrapper[4961]: I1205 18:41:15.075985 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_manila-operator-controller-manager-7c79b5df47-q2p4h_90df3fec-9bc9-48ca-a432-374c1f7e2002/manager/0.log" Dec 05 18:41:15 crc kubenswrapper[4961]: I1205 18:41:15.200479 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_mariadb-operator-controller-manager-56bbcc9d85-4whjc_f7757573-1085-4560-880c-3d9b36ce93f7/manager/0.log" Dec 05 18:41:15 crc kubenswrapper[4961]: I1205 18:41:15.268077 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-pszjn_f7ff9bd4-8a05-4a50-b38b-701451107b9f/kube-rbac-proxy/0.log" Dec 05 18:41:15 crc kubenswrapper[4961]: I1205 18:41:15.372173 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_neutron-operator-controller-manager-5fdfd5b6b5-pszjn_f7ff9bd4-8a05-4a50-b38b-701451107b9f/manager/0.log" Dec 05 18:41:15 crc kubenswrapper[4961]: I1205 18:41:15.476830 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-hgv2r_e661480b-d6fd-4c16-9f03-f519092d05c6/kube-rbac-proxy/0.log" Dec 05 18:41:15 crc kubenswrapper[4961]: I1205 18:41:15.562759 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_nova-operator-controller-manager-697bc559fc-hgv2r_e661480b-d6fd-4c16-9f03-f519092d05c6/manager/0.log" Dec 05 18:41:15 crc kubenswrapper[4961]: I1205 18:41:15.579435 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-2b6j2_a69e4847-13bc-4c1e-82a9-546fb11ad38d/kube-rbac-proxy/0.log" Dec 05 18:41:15 crc kubenswrapper[4961]: I1205 18:41:15.719563 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_octavia-operator-controller-manager-998648c74-2b6j2_a69e4847-13bc-4c1e-82a9-546fb11ad38d/manager/0.log" Dec 05 18:41:15 crc kubenswrapper[4961]: I1205 18:41:15.728340 4961 
log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl_ccea0c2d-817c-4895-b8a7-bf852bd12aa9/kube-rbac-proxy/0.log" Dec 05 18:41:15 crc kubenswrapper[4961]: I1205 18:41:15.821936 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-baremetal-operator-controller-manager-64bc77cfd4cznpl_ccea0c2d-817c-4895-b8a7-bf852bd12aa9/manager/0.log" Dec 05 18:41:16 crc kubenswrapper[4961]: I1205 18:41:16.119370 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-operator-d885c5b7-b9mhp_8359ac81-7e2e-4a86-9052-2cba7e945d40/operator/0.log" Dec 05 18:41:16 crc kubenswrapper[4961]: I1205 18:41:16.171597 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-index-vblkt_2fdc52dc-265b-42d1-8b82-4e2c0fbccb3b/registry-server/0.log" Dec 05 18:41:16 crc kubenswrapper[4961]: I1205 18:41:16.362282 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-jghkw_8fd2cc5d-67e4-4b9a-9d0c-58993491bc08/kube-rbac-proxy/0.log" Dec 05 18:41:16 crc kubenswrapper[4961]: I1205 18:41:16.436496 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_ovn-operator-controller-manager-b6456fdb6-jghkw_8fd2cc5d-67e4-4b9a-9d0c-58993491bc08/manager/0.log" Dec 05 18:41:16 crc kubenswrapper[4961]: I1205 18:41:16.513852 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-zbcdb_7ff3fbef-1490-4ce9-b350-03a0a7182b78/kube-rbac-proxy/0.log" Dec 05 18:41:16 crc kubenswrapper[4961]: I1205 18:41:16.663836 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_placement-operator-controller-manager-78f8948974-zbcdb_7ff3fbef-1490-4ce9-b350-03a0a7182b78/manager/0.log" Dec 05 18:41:16 crc kubenswrapper[4961]: I1205 18:41:16.748046 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_rabbitmq-cluster-operator-manager-668c99d594-4dbp7_39ba1343-9933-483d-aef2-90e0ceb14c79/operator/0.log" Dec 05 18:41:16 crc kubenswrapper[4961]: I1205 18:41:16.928270 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-c8x5r_767519b7-2067-4fca-a96b-bf9b02e1b273/manager/0.log" Dec 05 18:41:16 crc kubenswrapper[4961]: I1205 18:41:16.938765 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_swift-operator-controller-manager-5f8c65bbfc-c8x5r_767519b7-2067-4fca-a96b-bf9b02e1b273/kube-rbac-proxy/0.log" Dec 05 18:41:16 crc kubenswrapper[4961]: I1205 18:41:16.964409 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-dsmtk_0770d71d-11ea-4b63-8a98-31521f395686/kube-rbac-proxy/0.log" Dec 05 18:41:16 crc kubenswrapper[4961]: I1205 18:41:16.965061 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_openstack-operator-controller-manager-777bfdfd44-xwwmn_619cb5c4-1a5a-4eb8-ad2d-28615e0dc607/manager/0.log" Dec 05 18:41:17 crc kubenswrapper[4961]: I1205 18:41:17.142675 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_telemetry-operator-controller-manager-76cc84c6bb-dsmtk_0770d71d-11ea-4b63-8a98-31521f395686/manager/0.log" Dec 05 18:41:17 crc kubenswrapper[4961]: I1205 
18:41:17.166936 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-4k7dd_e27c6cc8-ef18-421e-8a7b-1b6bb2227724/manager/0.log" Dec 05 18:41:17 crc kubenswrapper[4961]: I1205 18:41:17.183203 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_test-operator-controller-manager-5854674fcc-4k7dd_e27c6cc8-ef18-421e-8a7b-1b6bb2227724/kube-rbac-proxy/0.log" Dec 05 18:41:17 crc kubenswrapper[4961]: I1205 18:41:17.298078 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-2jz8d_c567a803-253f-4895-a504-caee7ba37c34/kube-rbac-proxy/0.log" Dec 05 18:41:17 crc kubenswrapper[4961]: I1205 18:41:17.312668 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openstack-operators_watcher-operator-controller-manager-769dc69bc-2jz8d_c567a803-253f-4895-a504-caee7ba37c34/manager/0.log" Dec 05 18:41:27 crc kubenswrapper[4961]: I1205 18:41:27.245297 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 18:41:27 crc kubenswrapper[4961]: I1205 18:41:27.245918 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 18:41:36 crc kubenswrapper[4961]: I1205 18:41:36.259585 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-5hmrk_601ba962-a964-472d-b481-4946fd7265b1/control-plane-machine-set-operator/0.log" Dec 05 18:41:36 crc kubenswrapper[4961]: I1205 18:41:36.435488 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-6px8t_475e25de-63ce-4cae-8fc6-4c057d616247/kube-rbac-proxy/0.log" Dec 05 18:41:36 crc kubenswrapper[4961]: I1205 18:41:36.460816 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-6px8t_475e25de-63ce-4cae-8fc6-4c057d616247/machine-api-operator/0.log" Dec 05 18:41:41 crc kubenswrapper[4961]: I1205 18:41:41.126811 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gmxq7"] Dec 05 18:41:41 crc kubenswrapper[4961]: E1205 18:41:41.127629 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8e89c27a-ad06-499b-875d-d9cea0cf4ad1" containerName="container-00" Dec 05 18:41:41 crc kubenswrapper[4961]: I1205 18:41:41.127642 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="8e89c27a-ad06-499b-875d-d9cea0cf4ad1" containerName="container-00" Dec 05 18:41:41 crc kubenswrapper[4961]: I1205 18:41:41.127904 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="8e89c27a-ad06-499b-875d-d9cea0cf4ad1" containerName="container-00" Dec 05 18:41:41 crc kubenswrapper[4961]: I1205 18:41:41.129119 4961 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gmxq7" Dec 05 18:41:41 crc kubenswrapper[4961]: I1205 18:41:41.170155 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gmxq7"] Dec 05 18:41:41 crc kubenswrapper[4961]: I1205 18:41:41.219632 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/375879fa-7d54-42c2-be13-8f56360a340e-utilities\") pod \"redhat-operators-gmxq7\" (UID: \"375879fa-7d54-42c2-be13-8f56360a340e\") " pod="openshift-marketplace/redhat-operators-gmxq7" Dec 05 18:41:41 crc kubenswrapper[4961]: I1205 18:41:41.220670 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/375879fa-7d54-42c2-be13-8f56360a340e-catalog-content\") pod \"redhat-operators-gmxq7\" (UID: \"375879fa-7d54-42c2-be13-8f56360a340e\") " pod="openshift-marketplace/redhat-operators-gmxq7" Dec 05 18:41:41 crc kubenswrapper[4961]: I1205 18:41:41.220715 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7cgm\" (UniqueName: \"kubernetes.io/projected/375879fa-7d54-42c2-be13-8f56360a340e-kube-api-access-l7cgm\") pod \"redhat-operators-gmxq7\" (UID: \"375879fa-7d54-42c2-be13-8f56360a340e\") " pod="openshift-marketplace/redhat-operators-gmxq7" Dec 05 18:41:41 crc kubenswrapper[4961]: I1205 18:41:41.322390 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/375879fa-7d54-42c2-be13-8f56360a340e-catalog-content\") pod \"redhat-operators-gmxq7\" (UID: \"375879fa-7d54-42c2-be13-8f56360a340e\") " pod="openshift-marketplace/redhat-operators-gmxq7" Dec 05 18:41:41 crc kubenswrapper[4961]: I1205 18:41:41.322871 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7cgm\" (UniqueName: \"kubernetes.io/projected/375879fa-7d54-42c2-be13-8f56360a340e-kube-api-access-l7cgm\") pod \"redhat-operators-gmxq7\" (UID: \"375879fa-7d54-42c2-be13-8f56360a340e\") " pod="openshift-marketplace/redhat-operators-gmxq7" Dec 05 18:41:41 crc kubenswrapper[4961]: I1205 18:41:41.323053 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/375879fa-7d54-42c2-be13-8f56360a340e-utilities\") pod \"redhat-operators-gmxq7\" (UID: \"375879fa-7d54-42c2-be13-8f56360a340e\") " pod="openshift-marketplace/redhat-operators-gmxq7" Dec 05 18:41:41 crc kubenswrapper[4961]: I1205 18:41:41.323130 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/375879fa-7d54-42c2-be13-8f56360a340e-catalog-content\") pod \"redhat-operators-gmxq7\" (UID: \"375879fa-7d54-42c2-be13-8f56360a340e\") " pod="openshift-marketplace/redhat-operators-gmxq7" Dec 05 18:41:41 crc kubenswrapper[4961]: I1205 18:41:41.323502 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/375879fa-7d54-42c2-be13-8f56360a340e-utilities\") pod \"redhat-operators-gmxq7\" (UID: \"375879fa-7d54-42c2-be13-8f56360a340e\") " pod="openshift-marketplace/redhat-operators-gmxq7" Dec 05 18:41:41 crc kubenswrapper[4961]: I1205 18:41:41.354499 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-l7cgm\" (UniqueName: \"kubernetes.io/projected/375879fa-7d54-42c2-be13-8f56360a340e-kube-api-access-l7cgm\") pod \"redhat-operators-gmxq7\" (UID: \"375879fa-7d54-42c2-be13-8f56360a340e\") " pod="openshift-marketplace/redhat-operators-gmxq7" Dec 05 18:41:41 crc kubenswrapper[4961]: I1205 18:41:41.450841 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gmxq7" Dec 05 18:41:41 crc kubenswrapper[4961]: I1205 18:41:41.883430 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gmxq7"] Dec 05 18:41:42 crc kubenswrapper[4961]: I1205 18:41:42.687153 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gmxq7" event={"ID":"375879fa-7d54-42c2-be13-8f56360a340e","Type":"ContainerStarted","Data":"92acbe17236364130ec09bf67be8f28d0990b075b16e7c281b3ffdea74e449c9"} Dec 05 18:41:42 crc kubenswrapper[4961]: I1205 18:41:42.687879 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gmxq7" event={"ID":"375879fa-7d54-42c2-be13-8f56360a340e","Type":"ContainerStarted","Data":"0b9f4e243abb7dcfa22b0785fa243982fa082040b6880994e10ddc689f223aba"} Dec 05 18:41:43 crc kubenswrapper[4961]: I1205 18:41:43.697751 4961 generic.go:334] "Generic (PLEG): container finished" podID="375879fa-7d54-42c2-be13-8f56360a340e" containerID="92acbe17236364130ec09bf67be8f28d0990b075b16e7c281b3ffdea74e449c9" exitCode=0 Dec 05 18:41:43 crc kubenswrapper[4961]: I1205 18:41:43.697824 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gmxq7" event={"ID":"375879fa-7d54-42c2-be13-8f56360a340e","Type":"ContainerDied","Data":"92acbe17236364130ec09bf67be8f28d0990b075b16e7c281b3ffdea74e449c9"} Dec 05 18:41:45 crc kubenswrapper[4961]: I1205 18:41:45.721106 4961 generic.go:334] "Generic (PLEG): container finished" podID="375879fa-7d54-42c2-be13-8f56360a340e" containerID="4f9c2c94738485033a5db94209ef0d31b1c41a136465d5b4cb11f70be09932b5" exitCode=0 Dec 05 18:41:45 crc kubenswrapper[4961]: I1205 18:41:45.721384 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gmxq7" event={"ID":"375879fa-7d54-42c2-be13-8f56360a340e","Type":"ContainerDied","Data":"4f9c2c94738485033a5db94209ef0d31b1c41a136465d5b4cb11f70be09932b5"} Dec 05 18:41:46 crc kubenswrapper[4961]: I1205 18:41:46.735275 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gmxq7" event={"ID":"375879fa-7d54-42c2-be13-8f56360a340e","Type":"ContainerStarted","Data":"ce8d4b05870f465518aaa65446a0fd40ca58fd5f95e6d352455cf09d31300152"} Dec 05 18:41:46 crc kubenswrapper[4961]: I1205 18:41:46.770436 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gmxq7" podStartSLOduration=3.357193661 podStartE2EDuration="5.770401073s" podCreationTimestamp="2025-12-05 18:41:41 +0000 UTC" firstStartedPulling="2025-12-05 18:41:43.701760827 +0000 UTC m=+4109.762911300" lastFinishedPulling="2025-12-05 18:41:46.114968239 +0000 UTC m=+4112.176118712" observedRunningTime="2025-12-05 18:41:46.76290099 +0000 UTC m=+4112.824051493" watchObservedRunningTime="2025-12-05 18:41:46.770401073 +0000 UTC m=+4112.831551576" Dec 05 18:41:50 crc kubenswrapper[4961]: I1205 18:41:50.374038 4961 log.go:25] "Finished parsing log file" 
path="/var/log/pods/cert-manager_cert-manager-5b446d88c5-w62zs_69ce6fc6-0715-4f9c-9c01-3db5dfcbf386/cert-manager-controller/0.log" Dec 05 18:41:50 crc kubenswrapper[4961]: I1205 18:41:50.548370 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-cainjector-7f985d654d-jxh56_3186034b-e42e-4c32-a5ad-942d6bbb0659/cert-manager-cainjector/0.log" Dec 05 18:41:50 crc kubenswrapper[4961]: I1205 18:41:50.625680 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/cert-manager_cert-manager-webhook-5655c58dd6-xzk6r_6a524c2b-8219-4740-b09e-0b855aa04c35/cert-manager-webhook/0.log" Dec 05 18:41:51 crc kubenswrapper[4961]: I1205 18:41:51.451444 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gmxq7" Dec 05 18:41:51 crc kubenswrapper[4961]: I1205 18:41:51.451503 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gmxq7" Dec 05 18:41:52 crc kubenswrapper[4961]: I1205 18:41:52.501878 4961 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-gmxq7" podUID="375879fa-7d54-42c2-be13-8f56360a340e" containerName="registry-server" probeResult="failure" output=< Dec 05 18:41:52 crc kubenswrapper[4961]: timeout: failed to connect service ":50051" within 1s Dec 05 18:41:52 crc kubenswrapper[4961]: > Dec 05 18:41:57 crc kubenswrapper[4961]: I1205 18:41:57.246563 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Dec 05 18:41:57 crc kubenswrapper[4961]: I1205 18:41:57.247203 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Dec 05 18:42:01 crc kubenswrapper[4961]: I1205 18:42:01.517225 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gmxq7" Dec 05 18:42:01 crc kubenswrapper[4961]: I1205 18:42:01.599328 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gmxq7" Dec 05 18:42:01 crc kubenswrapper[4961]: I1205 18:42:01.751699 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gmxq7"] Dec 05 18:42:02 crc kubenswrapper[4961]: I1205 18:42:02.908636 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gmxq7" podUID="375879fa-7d54-42c2-be13-8f56360a340e" containerName="registry-server" containerID="cri-o://ce8d4b05870f465518aaa65446a0fd40ca58fd5f95e6d352455cf09d31300152" gracePeriod=2 Dec 05 18:42:03 crc kubenswrapper[4961]: I1205 18:42:03.348697 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gmxq7" Dec 05 18:42:03 crc kubenswrapper[4961]: I1205 18:42:03.492339 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/375879fa-7d54-42c2-be13-8f56360a340e-utilities\") pod \"375879fa-7d54-42c2-be13-8f56360a340e\" (UID: \"375879fa-7d54-42c2-be13-8f56360a340e\") " Dec 05 18:42:03 crc kubenswrapper[4961]: I1205 18:42:03.492425 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/375879fa-7d54-42c2-be13-8f56360a340e-catalog-content\") pod \"375879fa-7d54-42c2-be13-8f56360a340e\" (UID: \"375879fa-7d54-42c2-be13-8f56360a340e\") " Dec 05 18:42:03 crc kubenswrapper[4961]: I1205 18:42:03.492494 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7cgm\" (UniqueName: \"kubernetes.io/projected/375879fa-7d54-42c2-be13-8f56360a340e-kube-api-access-l7cgm\") pod \"375879fa-7d54-42c2-be13-8f56360a340e\" (UID: \"375879fa-7d54-42c2-be13-8f56360a340e\") " Dec 05 18:42:03 crc kubenswrapper[4961]: I1205 18:42:03.492910 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/375879fa-7d54-42c2-be13-8f56360a340e-utilities" (OuterVolumeSpecName: "utilities") pod "375879fa-7d54-42c2-be13-8f56360a340e" (UID: "375879fa-7d54-42c2-be13-8f56360a340e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:42:03 crc kubenswrapper[4961]: I1205 18:42:03.594761 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/375879fa-7d54-42c2-be13-8f56360a340e-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 18:42:03 crc kubenswrapper[4961]: I1205 18:42:03.610021 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/375879fa-7d54-42c2-be13-8f56360a340e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "375879fa-7d54-42c2-be13-8f56360a340e" (UID: "375879fa-7d54-42c2-be13-8f56360a340e"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:42:03 crc kubenswrapper[4961]: I1205 18:42:03.696953 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/375879fa-7d54-42c2-be13-8f56360a340e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 18:42:03 crc kubenswrapper[4961]: I1205 18:42:03.922285 4961 generic.go:334] "Generic (PLEG): container finished" podID="375879fa-7d54-42c2-be13-8f56360a340e" containerID="ce8d4b05870f465518aaa65446a0fd40ca58fd5f95e6d352455cf09d31300152" exitCode=0 Dec 05 18:42:03 crc kubenswrapper[4961]: I1205 18:42:03.922351 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gmxq7" Dec 05 18:42:03 crc kubenswrapper[4961]: I1205 18:42:03.922400 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gmxq7" event={"ID":"375879fa-7d54-42c2-be13-8f56360a340e","Type":"ContainerDied","Data":"ce8d4b05870f465518aaa65446a0fd40ca58fd5f95e6d352455cf09d31300152"} Dec 05 18:42:03 crc kubenswrapper[4961]: I1205 18:42:03.924041 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gmxq7" event={"ID":"375879fa-7d54-42c2-be13-8f56360a340e","Type":"ContainerDied","Data":"0b9f4e243abb7dcfa22b0785fa243982fa082040b6880994e10ddc689f223aba"} Dec 05 18:42:03 crc kubenswrapper[4961]: I1205 18:42:03.924081 4961 scope.go:117] "RemoveContainer" containerID="ce8d4b05870f465518aaa65446a0fd40ca58fd5f95e6d352455cf09d31300152" Dec 05 18:42:03 crc kubenswrapper[4961]: I1205 18:42:03.950748 4961 scope.go:117] "RemoveContainer" containerID="4f9c2c94738485033a5db94209ef0d31b1c41a136465d5b4cb11f70be09932b5" Dec 05 18:42:04 crc kubenswrapper[4961]: I1205 18:42:04.475095 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/375879fa-7d54-42c2-be13-8f56360a340e-kube-api-access-l7cgm" (OuterVolumeSpecName: "kube-api-access-l7cgm") pod "375879fa-7d54-42c2-be13-8f56360a340e" (UID: "375879fa-7d54-42c2-be13-8f56360a340e"). InnerVolumeSpecName "kube-api-access-l7cgm". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:42:04 crc kubenswrapper[4961]: I1205 18:42:04.489498 4961 scope.go:117] "RemoveContainer" containerID="92acbe17236364130ec09bf67be8f28d0990b075b16e7c281b3ffdea74e449c9" Dec 05 18:42:04 crc kubenswrapper[4961]: I1205 18:42:04.512495 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7cgm\" (UniqueName: \"kubernetes.io/projected/375879fa-7d54-42c2-be13-8f56360a340e-kube-api-access-l7cgm\") on node \"crc\" DevicePath \"\"" Dec 05 18:42:04 crc kubenswrapper[4961]: I1205 18:42:04.797173 4961 scope.go:117] "RemoveContainer" containerID="ce8d4b05870f465518aaa65446a0fd40ca58fd5f95e6d352455cf09d31300152" Dec 05 18:42:04 crc kubenswrapper[4961]: E1205 18:42:04.797723 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce8d4b05870f465518aaa65446a0fd40ca58fd5f95e6d352455cf09d31300152\": container with ID starting with ce8d4b05870f465518aaa65446a0fd40ca58fd5f95e6d352455cf09d31300152 not found: ID does not exist" containerID="ce8d4b05870f465518aaa65446a0fd40ca58fd5f95e6d352455cf09d31300152" Dec 05 18:42:04 crc kubenswrapper[4961]: I1205 18:42:04.797755 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce8d4b05870f465518aaa65446a0fd40ca58fd5f95e6d352455cf09d31300152"} err="failed to get container status \"ce8d4b05870f465518aaa65446a0fd40ca58fd5f95e6d352455cf09d31300152\": rpc error: code = NotFound desc = could not find container \"ce8d4b05870f465518aaa65446a0fd40ca58fd5f95e6d352455cf09d31300152\": container with ID starting with ce8d4b05870f465518aaa65446a0fd40ca58fd5f95e6d352455cf09d31300152 not found: ID does not exist" Dec 05 18:42:04 crc kubenswrapper[4961]: I1205 18:42:04.797870 4961 scope.go:117] "RemoveContainer" containerID="4f9c2c94738485033a5db94209ef0d31b1c41a136465d5b4cb11f70be09932b5" Dec 05 18:42:04 crc kubenswrapper[4961]: E1205 18:42:04.798215 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: 
code = NotFound desc = could not find container \"4f9c2c94738485033a5db94209ef0d31b1c41a136465d5b4cb11f70be09932b5\": container with ID starting with 4f9c2c94738485033a5db94209ef0d31b1c41a136465d5b4cb11f70be09932b5 not found: ID does not exist" containerID="4f9c2c94738485033a5db94209ef0d31b1c41a136465d5b4cb11f70be09932b5"
Dec 05 18:42:04 crc kubenswrapper[4961]: I1205 18:42:04.798251 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4f9c2c94738485033a5db94209ef0d31b1c41a136465d5b4cb11f70be09932b5"} err="failed to get container status \"4f9c2c94738485033a5db94209ef0d31b1c41a136465d5b4cb11f70be09932b5\": rpc error: code = NotFound desc = could not find container \"4f9c2c94738485033a5db94209ef0d31b1c41a136465d5b4cb11f70be09932b5\": container with ID starting with 4f9c2c94738485033a5db94209ef0d31b1c41a136465d5b4cb11f70be09932b5 not found: ID does not exist"
Dec 05 18:42:04 crc kubenswrapper[4961]: I1205 18:42:04.798290 4961 scope.go:117] "RemoveContainer" containerID="92acbe17236364130ec09bf67be8f28d0990b075b16e7c281b3ffdea74e449c9"
Dec 05 18:42:04 crc kubenswrapper[4961]: E1205 18:42:04.799415 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"92acbe17236364130ec09bf67be8f28d0990b075b16e7c281b3ffdea74e449c9\": container with ID starting with 92acbe17236364130ec09bf67be8f28d0990b075b16e7c281b3ffdea74e449c9 not found: ID does not exist" containerID="92acbe17236364130ec09bf67be8f28d0990b075b16e7c281b3ffdea74e449c9"
Dec 05 18:42:04 crc kubenswrapper[4961]: I1205 18:42:04.799448 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"92acbe17236364130ec09bf67be8f28d0990b075b16e7c281b3ffdea74e449c9"} err="failed to get container status \"92acbe17236364130ec09bf67be8f28d0990b075b16e7c281b3ffdea74e449c9\": rpc error: code = NotFound desc = could not find container \"92acbe17236364130ec09bf67be8f28d0990b075b16e7c281b3ffdea74e449c9\": container with ID starting with 92acbe17236364130ec09bf67be8f28d0990b075b16e7c281b3ffdea74e449c9 not found: ID does not exist"
Dec 05 18:42:04 crc kubenswrapper[4961]: I1205 18:42:04.874501 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gmxq7"]
Dec 05 18:42:04 crc kubenswrapper[4961]: I1205 18:42:04.874767 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gmxq7"]
Dec 05 18:42:04 crc kubenswrapper[4961]: I1205 18:42:04.936580 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-console-plugin-7fbb5f6569-h4zpz_c6ab1d3c-7d4b-4fe2-8c37-bf92924d5ae7/nmstate-console-plugin/0.log"
Dec 05 18:42:04 crc kubenswrapper[4961]: I1205 18:42:04.950455 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-handler-sp6lw_5593be6a-3351-48eb-ac4b-e34d11ac0b49/nmstate-handler/0.log"
Dec 05 18:42:05 crc kubenswrapper[4961]: I1205 18:42:05.136739 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-r585k_60b41a10-29ea-46ca-bc08-8c0473394b19/kube-rbac-proxy/0.log"
Dec 05 18:42:05 crc kubenswrapper[4961]: I1205 18:42:05.151257 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-metrics-7f946cbc9-r585k_60b41a10-29ea-46ca-bc08-8c0473394b19/nmstate-metrics/0.log"
Dec 05 18:42:05 crc kubenswrapper[4961]: I1205 18:42:05.271647 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-operator-5b5b58f5c8-q2qxh_7d46084a-4838-43be-80f5-54ada85ff38f/nmstate-operator/0.log"
Dec 05 18:42:05 crc kubenswrapper[4961]: I1205 18:42:05.349783 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-nmstate_nmstate-webhook-5f6d4c5ccb-k6qjw_ea22d5fb-23bc-436e-8ab7-80e86c571c28/nmstate-webhook/0.log"
Dec 05 18:42:06 crc kubenswrapper[4961]: I1205 18:42:06.875326 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="375879fa-7d54-42c2-be13-8f56360a340e" path="/var/lib/kubelet/pods/375879fa-7d54-42c2-be13-8f56360a340e/volumes"
Dec 05 18:42:20 crc kubenswrapper[4961]: I1205 18:42:20.625750 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-k6nwk_3f6c280d-42e8-40a5-b11c-e9a80f81125b/kube-rbac-proxy/0.log"
Dec 05 18:42:20 crc kubenswrapper[4961]: I1205 18:42:20.749862 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-f8648f98b-k6nwk_3f6c280d-42e8-40a5-b11c-e9a80f81125b/controller/0.log"
Dec 05 18:42:20 crc kubenswrapper[4961]: I1205 18:42:20.887593 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/cp-frr-files/0.log"
Dec 05 18:42:21 crc kubenswrapper[4961]: I1205 18:42:21.050809 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/cp-reloader/0.log"
Dec 05 18:42:21 crc kubenswrapper[4961]: I1205 18:42:21.073649 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/cp-frr-files/0.log"
Dec 05 18:42:21 crc kubenswrapper[4961]: I1205 18:42:21.085327 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/cp-metrics/0.log"
Dec 05 18:42:21 crc kubenswrapper[4961]: I1205 18:42:21.113241 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/cp-reloader/0.log"
Dec 05 18:42:21 crc kubenswrapper[4961]: I1205 18:42:21.260275 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/cp-frr-files/0.log"
Dec 05 18:42:21 crc kubenswrapper[4961]: I1205 18:42:21.296847 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/cp-metrics/0.log"
Dec 05 18:42:21 crc kubenswrapper[4961]: I1205 18:42:21.342170 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/cp-metrics/0.log"
Dec 05 18:42:21 crc kubenswrapper[4961]: I1205 18:42:21.373637 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/cp-reloader/0.log"
Dec 05 18:42:21 crc kubenswrapper[4961]: I1205 18:42:21.480016 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/cp-frr-files/0.log"
Dec 05 18:42:21 crc kubenswrapper[4961]: I1205 18:42:21.502371 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/cp-metrics/0.log"
Dec 05 18:42:21 crc kubenswrapper[4961]: I1205 18:42:21.510951 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/cp-reloader/0.log"
Dec 05 18:42:21 crc kubenswrapper[4961]: I1205 18:42:21.546360 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/controller/0.log"
Dec 05 18:42:21 crc kubenswrapper[4961]: I1205 18:42:21.679464 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/frr-metrics/0.log"
Dec 05 18:42:21 crc kubenswrapper[4961]: I1205 18:42:21.730480 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/kube-rbac-proxy/0.log"
Dec 05 18:42:21 crc kubenswrapper[4961]: I1205 18:42:21.730713 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/kube-rbac-proxy-frr/0.log"
Dec 05 18:42:21 crc kubenswrapper[4961]: I1205 18:42:21.901071 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/reloader/0.log"
Dec 05 18:42:21 crc kubenswrapper[4961]: I1205 18:42:21.973588 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7fcb986d4-gsnw2_013262fd-338d-4a14-89f8-d682d09916f2/frr-k8s-webhook-server/0.log"
Dec 05 18:42:22 crc kubenswrapper[4961]: I1205 18:42:22.240725 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-67f59cc659-4mmbn_1dcf69fa-3afe-4fb7-a64b-838dbab83937/manager/0.log"
Dec 05 18:42:22 crc kubenswrapper[4961]: I1205 18:42:22.432893 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-7c6bcfcd7d-5ng2j_6139f869-6350-4649-a6fd-a969bf96e18a/webhook-server/0.log"
Dec 05 18:42:22 crc kubenswrapper[4961]: I1205 18:42:22.478829 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-pcwhx_b1a0921b-0a54-4163-8931-0b6ef9dd1051/kube-rbac-proxy/0.log"
Dec 05 18:42:22 crc kubenswrapper[4961]: I1205 18:42:22.995987 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bfzd4_17678ad4-8645-4f4f-a752-c1f92d4610f6/frr/0.log"
Dec 05 18:42:23 crc kubenswrapper[4961]: I1205 18:42:23.087892 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-pcwhx_b1a0921b-0a54-4163-8931-0b6ef9dd1051/speaker/0.log"
Dec 05 18:42:27 crc kubenswrapper[4961]: I1205 18:42:27.251597 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 18:42:27 crc kubenswrapper[4961]: I1205 18:42:27.252349 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 18:42:27 crc kubenswrapper[4961]: I1205 18:42:27.252406 4961 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vc27"
Dec 05 18:42:27 crc kubenswrapper[4961]: I1205 18:42:27.253321 4961 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f1a110177a9eeba85f570ef0c42f98bf4014c9fcc0be0c4a67f6cf96d7337c8c"} pod="openshift-machine-config-operator/machine-config-daemon-4vc27" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 18:42:27 crc kubenswrapper[4961]: I1205 18:42:27.253389 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" containerID="cri-o://f1a110177a9eeba85f570ef0c42f98bf4014c9fcc0be0c4a67f6cf96d7337c8c" gracePeriod=600
Dec 05 18:42:28 crc kubenswrapper[4961]: I1205 18:42:28.153254 4961 generic.go:334] "Generic (PLEG): container finished" podID="c048c267-061b-479b-9d63-b3aee093d9f6" containerID="f1a110177a9eeba85f570ef0c42f98bf4014c9fcc0be0c4a67f6cf96d7337c8c" exitCode=0
Dec 05 18:42:28 crc kubenswrapper[4961]: I1205 18:42:28.153312 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerDied","Data":"f1a110177a9eeba85f570ef0c42f98bf4014c9fcc0be0c4a67f6cf96d7337c8c"}
Dec 05 18:42:28 crc kubenswrapper[4961]: I1205 18:42:28.153814 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerStarted","Data":"4ede08b8bde2382d630f94de033c71c18a6c1b31a25c85ecd02cbd8429e97b32"}
Dec 05 18:42:28 crc kubenswrapper[4961]: I1205 18:42:28.153837 4961 scope.go:117] "RemoveContainer" containerID="74fd002f0b7405a6427018a622bb02c7668ea233bebf64512ad51cf067b60828"
Dec 05 18:42:36 crc kubenswrapper[4961]: I1205 18:42:36.777553 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5_3dfe66b7-513e-4e53-854d-b1d0d9ca8acd/util/0.log"
Dec 05 18:42:36 crc kubenswrapper[4961]: I1205 18:42:36.919195 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5_3dfe66b7-513e-4e53-854d-b1d0d9ca8acd/util/0.log"
Dec 05 18:42:36 crc kubenswrapper[4961]: I1205 18:42:36.982209 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5_3dfe66b7-513e-4e53-854d-b1d0d9ca8acd/pull/0.log"
Dec 05 18:42:36 crc kubenswrapper[4961]: I1205 18:42:36.986577 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5_3dfe66b7-513e-4e53-854d-b1d0d9ca8acd/pull/0.log"
Dec 05 18:42:37 crc kubenswrapper[4961]: I1205 18:42:37.134031 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5_3dfe66b7-513e-4e53-854d-b1d0d9ca8acd/util/0.log"
Dec 05 18:42:37 crc kubenswrapper[4961]: I1205 18:42:37.161269 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5_3dfe66b7-513e-4e53-854d-b1d0d9ca8acd/pull/0.log"
Dec 05 18:42:37 crc kubenswrapper[4961]: I1205 18:42:37.175386 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_5064f9f8917b246f69f5d7fc025e7e6c34236c02bca31167615d38212fdjbt5_3dfe66b7-513e-4e53-854d-b1d0d9ca8acd/extract/0.log"
Dec 05 18:42:37 crc kubenswrapper[4961]: I1205 18:42:37.377252 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz_9f04f5e2-a474-427b-a466-77d789a6daa7/util/0.log"
Dec 05 18:42:37 crc kubenswrapper[4961]: I1205 18:42:37.565832 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz_9f04f5e2-a474-427b-a466-77d789a6daa7/pull/0.log"
Dec 05 18:42:37 crc kubenswrapper[4961]: I1205 18:42:37.565985 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz_9f04f5e2-a474-427b-a466-77d789a6daa7/pull/0.log"
Dec 05 18:42:37 crc kubenswrapper[4961]: I1205 18:42:37.573293 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz_9f04f5e2-a474-427b-a466-77d789a6daa7/util/0.log"
Dec 05 18:42:37 crc kubenswrapper[4961]: I1205 18:42:37.700542 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz_9f04f5e2-a474-427b-a466-77d789a6daa7/pull/0.log"
Dec 05 18:42:37 crc kubenswrapper[4961]: I1205 18:42:37.714285 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz_9f04f5e2-a474-427b-a466-77d789a6daa7/util/0.log"
Dec 05 18:42:37 crc kubenswrapper[4961]: I1205 18:42:37.723351 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_af69698b82fdf008f5ff9e195cf808a654240e16b13dcd924b74994f83h2zfz_9f04f5e2-a474-427b-a466-77d789a6daa7/extract/0.log"
Dec 05 18:42:37 crc kubenswrapper[4961]: I1205 18:42:37.893377 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x247h_9f2bf186-07e1-4212-a88b-377cb9dcc1e4/extract-utilities/0.log"
Dec 05 18:42:38 crc kubenswrapper[4961]: I1205 18:42:38.210070 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x247h_9f2bf186-07e1-4212-a88b-377cb9dcc1e4/extract-content/0.log"
Dec 05 18:42:38 crc kubenswrapper[4961]: I1205 18:42:38.213995 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x247h_9f2bf186-07e1-4212-a88b-377cb9dcc1e4/extract-utilities/0.log"
Dec 05 18:42:38 crc kubenswrapper[4961]: I1205 18:42:38.250522 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x247h_9f2bf186-07e1-4212-a88b-377cb9dcc1e4/extract-content/0.log"
Dec 05 18:42:38 crc kubenswrapper[4961]: I1205 18:42:38.409293 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x247h_9f2bf186-07e1-4212-a88b-377cb9dcc1e4/extract-content/0.log"
Dec 05 18:42:38 crc kubenswrapper[4961]: I1205 18:42:38.450698 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x247h_9f2bf186-07e1-4212-a88b-377cb9dcc1e4/extract-utilities/0.log"
Dec 05 18:42:38 crc kubenswrapper[4961]: I1205 18:42:38.635473 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2xxct_e334c4db-efe0-4d34-84c1-a9bc5e2a57e5/extract-utilities/0.log"
Dec 05 18:42:38 crc kubenswrapper[4961]: I1205 18:42:38.877529 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2xxct_e334c4db-efe0-4d34-84c1-a9bc5e2a57e5/extract-content/0.log"
Dec 05 18:42:38 crc kubenswrapper[4961]: I1205 18:42:38.903208 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2xxct_e334c4db-efe0-4d34-84c1-a9bc5e2a57e5/extract-content/0.log"
Dec 05 18:42:38 crc kubenswrapper[4961]: I1205 18:42:38.961370 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-x247h_9f2bf186-07e1-4212-a88b-377cb9dcc1e4/registry-server/0.log"
Dec 05 18:42:38 crc kubenswrapper[4961]: I1205 18:42:38.983917 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2xxct_e334c4db-efe0-4d34-84c1-a9bc5e2a57e5/extract-utilities/0.log"
Dec 05 18:42:39 crc kubenswrapper[4961]: I1205 18:42:39.110873 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2xxct_e334c4db-efe0-4d34-84c1-a9bc5e2a57e5/extract-content/0.log"
Dec 05 18:42:39 crc kubenswrapper[4961]: I1205 18:42:39.194272 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2xxct_e334c4db-efe0-4d34-84c1-a9bc5e2a57e5/extract-utilities/0.log"
Dec 05 18:42:39 crc kubenswrapper[4961]: I1205 18:42:39.266427 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-2xxct_e334c4db-efe0-4d34-84c1-a9bc5e2a57e5/registry-server/0.log"
Dec 05 18:42:39 crc kubenswrapper[4961]: I1205 18:42:39.336414 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-kxwjj_bb5a393d-4029-4474-937c-3ddf348254f2/marketplace-operator/0.log"
Dec 05 18:42:39 crc kubenswrapper[4961]: I1205 18:42:39.446205 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6s9p6_6a05d058-cde3-438c-805f-90d265994736/extract-utilities/0.log"
Dec 05 18:42:39 crc kubenswrapper[4961]: I1205 18:42:39.651512 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6s9p6_6a05d058-cde3-438c-805f-90d265994736/extract-content/0.log"
Dec 05 18:42:39 crc kubenswrapper[4961]: I1205 18:42:39.687503 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6s9p6_6a05d058-cde3-438c-805f-90d265994736/extract-content/0.log"
Dec 05 18:42:39 crc kubenswrapper[4961]: I1205 18:42:39.698947 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6s9p6_6a05d058-cde3-438c-805f-90d265994736/extract-utilities/0.log"
Dec 05 18:42:39 crc kubenswrapper[4961]: I1205 18:42:39.870493 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6s9p6_6a05d058-cde3-438c-805f-90d265994736/extract-utilities/0.log"
Dec 05 18:42:39 crc kubenswrapper[4961]: I1205 18:42:39.914508 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6s9p6_6a05d058-cde3-438c-805f-90d265994736/extract-content/0.log"
Dec 05 18:42:40 crc kubenswrapper[4961]: I1205 18:42:40.081597 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-6s9p6_6a05d058-cde3-438c-805f-90d265994736/registry-server/0.log"
Dec 05 18:42:40 crc kubenswrapper[4961]: I1205 18:42:40.087281 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9k6x4_4e3eba44-cc06-4bc6-83c2-66fcfde32591/extract-utilities/0.log"
Dec 05 18:42:40 crc kubenswrapper[4961]: I1205 18:42:40.329277 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9k6x4_4e3eba44-cc06-4bc6-83c2-66fcfde32591/extract-content/0.log"
Dec 05 18:42:40 crc kubenswrapper[4961]: I1205 18:42:40.358448 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9k6x4_4e3eba44-cc06-4bc6-83c2-66fcfde32591/extract-utilities/0.log"
Dec 05 18:42:40 crc kubenswrapper[4961]: I1205 18:42:40.402917 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9k6x4_4e3eba44-cc06-4bc6-83c2-66fcfde32591/extract-content/0.log"
Dec 05 18:42:40 crc kubenswrapper[4961]: I1205 18:42:40.592908 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9k6x4_4e3eba44-cc06-4bc6-83c2-66fcfde32591/extract-utilities/0.log"
Dec 05 18:42:40 crc kubenswrapper[4961]: I1205 18:42:40.620982 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9k6x4_4e3eba44-cc06-4bc6-83c2-66fcfde32591/extract-content/0.log"
Dec 05 18:42:41 crc kubenswrapper[4961]: I1205 18:42:41.047449 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-9k6x4_4e3eba44-cc06-4bc6-83c2-66fcfde32591/registry-server/0.log"
Dec 05 18:43:12 crc kubenswrapper[4961]: I1205 18:43:12.849676 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-mj4tn"]
Dec 05 18:43:12 crc kubenswrapper[4961]: E1205 18:43:12.850486 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="375879fa-7d54-42c2-be13-8f56360a340e" containerName="registry-server"
Dec 05 18:43:12 crc kubenswrapper[4961]: I1205 18:43:12.850498 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="375879fa-7d54-42c2-be13-8f56360a340e" containerName="registry-server"
Dec 05 18:43:12 crc kubenswrapper[4961]: E1205 18:43:12.850521 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="375879fa-7d54-42c2-be13-8f56360a340e" containerName="extract-content"
Dec 05 18:43:12 crc kubenswrapper[4961]: I1205 18:43:12.850527 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="375879fa-7d54-42c2-be13-8f56360a340e" containerName="extract-content"
Dec 05 18:43:12 crc kubenswrapper[4961]: E1205 18:43:12.850561 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="375879fa-7d54-42c2-be13-8f56360a340e" containerName="extract-utilities"
Dec 05 18:43:12 crc kubenswrapper[4961]: I1205 18:43:12.850567 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="375879fa-7d54-42c2-be13-8f56360a340e" containerName="extract-utilities"
Dec 05 18:43:12 crc kubenswrapper[4961]: I1205 18:43:12.850731 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="375879fa-7d54-42c2-be13-8f56360a340e" containerName="registry-server"
Dec 05 18:43:12 crc kubenswrapper[4961]: I1205 18:43:12.852029 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mj4tn"
Dec 05 18:43:12 crc kubenswrapper[4961]: I1205 18:43:12.885483 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mj4tn"]
Dec 05 18:43:12 crc kubenswrapper[4961]: I1205 18:43:12.960197 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h858c\" (UniqueName: \"kubernetes.io/projected/a0470f56-267a-433b-8155-9e1fed3d3029-kube-api-access-h858c\") pod \"redhat-marketplace-mj4tn\" (UID: \"a0470f56-267a-433b-8155-9e1fed3d3029\") " pod="openshift-marketplace/redhat-marketplace-mj4tn"
Dec 05 18:43:12 crc kubenswrapper[4961]: I1205 18:43:12.960272 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0470f56-267a-433b-8155-9e1fed3d3029-catalog-content\") pod \"redhat-marketplace-mj4tn\" (UID: \"a0470f56-267a-433b-8155-9e1fed3d3029\") " pod="openshift-marketplace/redhat-marketplace-mj4tn"
Dec 05 18:43:12 crc kubenswrapper[4961]: I1205 18:43:12.960477 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0470f56-267a-433b-8155-9e1fed3d3029-utilities\") pod \"redhat-marketplace-mj4tn\" (UID: \"a0470f56-267a-433b-8155-9e1fed3d3029\") " pod="openshift-marketplace/redhat-marketplace-mj4tn"
Dec 05 18:43:13 crc kubenswrapper[4961]: I1205 18:43:13.061905 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0470f56-267a-433b-8155-9e1fed3d3029-catalog-content\") pod \"redhat-marketplace-mj4tn\" (UID: \"a0470f56-267a-433b-8155-9e1fed3d3029\") " pod="openshift-marketplace/redhat-marketplace-mj4tn"
Dec 05 18:43:13 crc kubenswrapper[4961]: I1205 18:43:13.062047 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0470f56-267a-433b-8155-9e1fed3d3029-utilities\") pod \"redhat-marketplace-mj4tn\" (UID: \"a0470f56-267a-433b-8155-9e1fed3d3029\") " pod="openshift-marketplace/redhat-marketplace-mj4tn"
Dec 05 18:43:13 crc kubenswrapper[4961]: I1205 18:43:13.062128 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h858c\" (UniqueName: \"kubernetes.io/projected/a0470f56-267a-433b-8155-9e1fed3d3029-kube-api-access-h858c\") pod \"redhat-marketplace-mj4tn\" (UID: \"a0470f56-267a-433b-8155-9e1fed3d3029\") " pod="openshift-marketplace/redhat-marketplace-mj4tn"
Dec 05 18:43:13 crc kubenswrapper[4961]: I1205 18:43:13.062427 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0470f56-267a-433b-8155-9e1fed3d3029-catalog-content\") pod \"redhat-marketplace-mj4tn\" (UID: \"a0470f56-267a-433b-8155-9e1fed3d3029\") " pod="openshift-marketplace/redhat-marketplace-mj4tn"
Dec 05 18:43:13 crc kubenswrapper[4961]: I1205 18:43:13.062528 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0470f56-267a-433b-8155-9e1fed3d3029-utilities\") pod \"redhat-marketplace-mj4tn\" (UID: \"a0470f56-267a-433b-8155-9e1fed3d3029\") " pod="openshift-marketplace/redhat-marketplace-mj4tn"
Dec 05 18:43:13 crc kubenswrapper[4961]: I1205 18:43:13.085350 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h858c\" (UniqueName: \"kubernetes.io/projected/a0470f56-267a-433b-8155-9e1fed3d3029-kube-api-access-h858c\") pod \"redhat-marketplace-mj4tn\" (UID: \"a0470f56-267a-433b-8155-9e1fed3d3029\") " pod="openshift-marketplace/redhat-marketplace-mj4tn"
Dec 05 18:43:13 crc kubenswrapper[4961]: I1205 18:43:13.173363 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mj4tn"
Dec 05 18:43:13 crc kubenswrapper[4961]: I1205 18:43:13.738916 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-mj4tn"]
Dec 05 18:43:14 crc kubenswrapper[4961]: I1205 18:43:14.584329 4961 generic.go:334] "Generic (PLEG): container finished" podID="a0470f56-267a-433b-8155-9e1fed3d3029" containerID="ed0d62906af14124fee9e6deb923f8850b364cd8dce9d017e6440c7ee844ab2b" exitCode=0
Dec 05 18:43:14 crc kubenswrapper[4961]: I1205 18:43:14.584818 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mj4tn" event={"ID":"a0470f56-267a-433b-8155-9e1fed3d3029","Type":"ContainerDied","Data":"ed0d62906af14124fee9e6deb923f8850b364cd8dce9d017e6440c7ee844ab2b"}
Dec 05 18:43:14 crc kubenswrapper[4961]: I1205 18:43:14.585575 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mj4tn" event={"ID":"a0470f56-267a-433b-8155-9e1fed3d3029","Type":"ContainerStarted","Data":"afe5a59af5c1aa8cd19718c25eee3f347689456bacd373580291e72bc0e30b78"}
Dec 05 18:43:14 crc kubenswrapper[4961]: I1205 18:43:14.593368 4961 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Dec 05 18:43:15 crc kubenswrapper[4961]: I1205 18:43:15.595712 4961 generic.go:334] "Generic (PLEG): container finished" podID="a0470f56-267a-433b-8155-9e1fed3d3029" containerID="ba4ed635b4c66295d543dfaedb5e8c612c57748aa3f64204a79a27797d652ded" exitCode=0
Dec 05 18:43:15 crc kubenswrapper[4961]: I1205 18:43:15.595938 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mj4tn" event={"ID":"a0470f56-267a-433b-8155-9e1fed3d3029","Type":"ContainerDied","Data":"ba4ed635b4c66295d543dfaedb5e8c612c57748aa3f64204a79a27797d652ded"}
Dec 05 18:43:16 crc kubenswrapper[4961]: I1205 18:43:16.607373 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mj4tn" event={"ID":"a0470f56-267a-433b-8155-9e1fed3d3029","Type":"ContainerStarted","Data":"ca9042dc82ecee970d28473daa7dfe0e79881bf4ec2c180d3d8d2ed15144f118"}
Dec 05 18:43:16 crc kubenswrapper[4961]: I1205 18:43:16.629987 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-mj4tn" podStartSLOduration=3.225960176 podStartE2EDuration="4.629967656s" podCreationTimestamp="2025-12-05 18:43:12 +0000 UTC" firstStartedPulling="2025-12-05 18:43:14.592930779 +0000 UTC m=+4200.654081262" lastFinishedPulling="2025-12-05 18:43:15.996938269 +0000 UTC m=+4202.058088742" observedRunningTime="2025-12-05 18:43:16.626038539 +0000 UTC m=+4202.687189012" watchObservedRunningTime="2025-12-05 18:43:16.629967656 +0000 UTC m=+4202.691118129"
Dec 05 18:43:23 crc kubenswrapper[4961]: I1205 18:43:23.174078 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-mj4tn"
Dec 05 18:43:23 crc kubenswrapper[4961]: I1205 18:43:23.174591 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-mj4tn"
Dec 05 18:43:23 crc kubenswrapper[4961]: I1205 18:43:23.530760 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-mj4tn"
Dec 05 18:43:23 crc kubenswrapper[4961]: I1205 18:43:23.740502 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-mj4tn"
Dec 05 18:43:23 crc kubenswrapper[4961]: I1205 18:43:23.791192 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mj4tn"]
Dec 05 18:43:25 crc kubenswrapper[4961]: I1205 18:43:25.702889 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-mj4tn" podUID="a0470f56-267a-433b-8155-9e1fed3d3029" containerName="registry-server" containerID="cri-o://ca9042dc82ecee970d28473daa7dfe0e79881bf4ec2c180d3d8d2ed15144f118" gracePeriod=2
Dec 05 18:43:26 crc kubenswrapper[4961]: I1205 18:43:26.676752 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mj4tn"
Dec 05 18:43:26 crc kubenswrapper[4961]: I1205 18:43:26.736397 4961 generic.go:334] "Generic (PLEG): container finished" podID="a0470f56-267a-433b-8155-9e1fed3d3029" containerID="ca9042dc82ecee970d28473daa7dfe0e79881bf4ec2c180d3d8d2ed15144f118" exitCode=0
Dec 05 18:43:26 crc kubenswrapper[4961]: I1205 18:43:26.736447 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mj4tn" event={"ID":"a0470f56-267a-433b-8155-9e1fed3d3029","Type":"ContainerDied","Data":"ca9042dc82ecee970d28473daa7dfe0e79881bf4ec2c180d3d8d2ed15144f118"}
Dec 05 18:43:26 crc kubenswrapper[4961]: I1205 18:43:26.736466 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-mj4tn"
Dec 05 18:43:26 crc kubenswrapper[4961]: I1205 18:43:26.736483 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-mj4tn" event={"ID":"a0470f56-267a-433b-8155-9e1fed3d3029","Type":"ContainerDied","Data":"afe5a59af5c1aa8cd19718c25eee3f347689456bacd373580291e72bc0e30b78"}
Dec 05 18:43:26 crc kubenswrapper[4961]: I1205 18:43:26.736510 4961 scope.go:117] "RemoveContainer" containerID="ca9042dc82ecee970d28473daa7dfe0e79881bf4ec2c180d3d8d2ed15144f118"
Dec 05 18:43:26 crc kubenswrapper[4961]: I1205 18:43:26.739123 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0470f56-267a-433b-8155-9e1fed3d3029-catalog-content\") pod \"a0470f56-267a-433b-8155-9e1fed3d3029\" (UID: \"a0470f56-267a-433b-8155-9e1fed3d3029\") "
Dec 05 18:43:26 crc kubenswrapper[4961]: I1205 18:43:26.739468 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h858c\" (UniqueName: \"kubernetes.io/projected/a0470f56-267a-433b-8155-9e1fed3d3029-kube-api-access-h858c\") pod \"a0470f56-267a-433b-8155-9e1fed3d3029\" (UID: \"a0470f56-267a-433b-8155-9e1fed3d3029\") "
Dec 05 18:43:26 crc kubenswrapper[4961]: I1205 18:43:26.739577 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0470f56-267a-433b-8155-9e1fed3d3029-utilities\") pod \"a0470f56-267a-433b-8155-9e1fed3d3029\" (UID: \"a0470f56-267a-433b-8155-9e1fed3d3029\") "
Dec 05 18:43:26 crc kubenswrapper[4961]: I1205 18:43:26.740302 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0470f56-267a-433b-8155-9e1fed3d3029-utilities" (OuterVolumeSpecName: "utilities") pod "a0470f56-267a-433b-8155-9e1fed3d3029" (UID: "a0470f56-267a-433b-8155-9e1fed3d3029"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 18:43:26 crc kubenswrapper[4961]: I1205 18:43:26.749124 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0470f56-267a-433b-8155-9e1fed3d3029-kube-api-access-h858c" (OuterVolumeSpecName: "kube-api-access-h858c") pod "a0470f56-267a-433b-8155-9e1fed3d3029" (UID: "a0470f56-267a-433b-8155-9e1fed3d3029"). InnerVolumeSpecName "kube-api-access-h858c". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 18:43:26 crc kubenswrapper[4961]: I1205 18:43:26.761006 4961 scope.go:117] "RemoveContainer" containerID="ba4ed635b4c66295d543dfaedb5e8c612c57748aa3f64204a79a27797d652ded"
Dec 05 18:43:26 crc kubenswrapper[4961]: I1205 18:43:26.769350 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a0470f56-267a-433b-8155-9e1fed3d3029-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a0470f56-267a-433b-8155-9e1fed3d3029" (UID: "a0470f56-267a-433b-8155-9e1fed3d3029"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 18:43:26 crc kubenswrapper[4961]: I1205 18:43:26.820485 4961 scope.go:117] "RemoveContainer" containerID="ed0d62906af14124fee9e6deb923f8850b364cd8dce9d017e6440c7ee844ab2b"
Dec 05 18:43:26 crc kubenswrapper[4961]: I1205 18:43:26.842625 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h858c\" (UniqueName: \"kubernetes.io/projected/a0470f56-267a-433b-8155-9e1fed3d3029-kube-api-access-h858c\") on node \"crc\" DevicePath \"\""
Dec 05 18:43:26 crc kubenswrapper[4961]: I1205 18:43:26.842681 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a0470f56-267a-433b-8155-9e1fed3d3029-utilities\") on node \"crc\" DevicePath \"\""
Dec 05 18:43:26 crc kubenswrapper[4961]: I1205 18:43:26.842698 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a0470f56-267a-433b-8155-9e1fed3d3029-catalog-content\") on node \"crc\" DevicePath \"\""
Dec 05 18:43:26 crc kubenswrapper[4961]: I1205 18:43:26.863001 4961 scope.go:117] "RemoveContainer" containerID="ca9042dc82ecee970d28473daa7dfe0e79881bf4ec2c180d3d8d2ed15144f118"
Dec 05 18:43:26 crc kubenswrapper[4961]: E1205 18:43:26.863535 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ca9042dc82ecee970d28473daa7dfe0e79881bf4ec2c180d3d8d2ed15144f118\": container with ID starting with ca9042dc82ecee970d28473daa7dfe0e79881bf4ec2c180d3d8d2ed15144f118 not found: ID does not exist" containerID="ca9042dc82ecee970d28473daa7dfe0e79881bf4ec2c180d3d8d2ed15144f118"
Dec 05 18:43:26 crc kubenswrapper[4961]: I1205 18:43:26.863576 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ca9042dc82ecee970d28473daa7dfe0e79881bf4ec2c180d3d8d2ed15144f118"} err="failed to get container status \"ca9042dc82ecee970d28473daa7dfe0e79881bf4ec2c180d3d8d2ed15144f118\": rpc error: code = NotFound desc = could not find container \"ca9042dc82ecee970d28473daa7dfe0e79881bf4ec2c180d3d8d2ed15144f118\": container with ID starting with ca9042dc82ecee970d28473daa7dfe0e79881bf4ec2c180d3d8d2ed15144f118 not found: ID does not exist"
Dec 05 18:43:26 crc kubenswrapper[4961]: I1205 18:43:26.863602 4961 scope.go:117] "RemoveContainer" containerID="ba4ed635b4c66295d543dfaedb5e8c612c57748aa3f64204a79a27797d652ded"
Dec 05 18:43:26 crc kubenswrapper[4961]: E1205 18:43:26.865348 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ba4ed635b4c66295d543dfaedb5e8c612c57748aa3f64204a79a27797d652ded\": container with ID starting with ba4ed635b4c66295d543dfaedb5e8c612c57748aa3f64204a79a27797d652ded not found: ID does not exist" containerID="ba4ed635b4c66295d543dfaedb5e8c612c57748aa3f64204a79a27797d652ded"
Dec 05 18:43:26 crc kubenswrapper[4961]: I1205 18:43:26.865369 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ba4ed635b4c66295d543dfaedb5e8c612c57748aa3f64204a79a27797d652ded"} err="failed to get container status \"ba4ed635b4c66295d543dfaedb5e8c612c57748aa3f64204a79a27797d652ded\": rpc error: code = NotFound desc = could not find container \"ba4ed635b4c66295d543dfaedb5e8c612c57748aa3f64204a79a27797d652ded\": container with ID starting with ba4ed635b4c66295d543dfaedb5e8c612c57748aa3f64204a79a27797d652ded not found: ID does not exist"
Dec 05 18:43:26 crc kubenswrapper[4961]: I1205 18:43:26.865384 4961 scope.go:117] "RemoveContainer" containerID="ed0d62906af14124fee9e6deb923f8850b364cd8dce9d017e6440c7ee844ab2b"
Dec 05 18:43:26 crc kubenswrapper[4961]: E1205 18:43:26.866138 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed0d62906af14124fee9e6deb923f8850b364cd8dce9d017e6440c7ee844ab2b\": container with ID starting with ed0d62906af14124fee9e6deb923f8850b364cd8dce9d017e6440c7ee844ab2b not found: ID does not exist" containerID="ed0d62906af14124fee9e6deb923f8850b364cd8dce9d017e6440c7ee844ab2b"
Dec 05 18:43:26 crc kubenswrapper[4961]: I1205 18:43:26.866200 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed0d62906af14124fee9e6deb923f8850b364cd8dce9d017e6440c7ee844ab2b"} err="failed to get container status \"ed0d62906af14124fee9e6deb923f8850b364cd8dce9d017e6440c7ee844ab2b\": rpc error: code = NotFound desc = could not find container \"ed0d62906af14124fee9e6deb923f8850b364cd8dce9d017e6440c7ee844ab2b\": container with ID starting with ed0d62906af14124fee9e6deb923f8850b364cd8dce9d017e6440c7ee844ab2b not found: ID does not exist"
Dec 05 18:43:27 crc kubenswrapper[4961]: I1205 18:43:27.092853 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-mj4tn"]
Dec 05 18:43:27 crc kubenswrapper[4961]: I1205 18:43:27.107997 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-mj4tn"]
Dec 05 18:43:28 crc kubenswrapper[4961]: I1205 18:43:28.874899 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0470f56-267a-433b-8155-9e1fed3d3029" path="/var/lib/kubelet/pods/a0470f56-267a-433b-8155-9e1fed3d3029/volumes"
Dec 05 18:44:26 crc kubenswrapper[4961]: I1205 18:44:26.671629 4961 generic.go:334] "Generic (PLEG): container finished" podID="c7e452cb-e813-4aeb-99f2-bbfa334ecdc2" containerID="fbba0665853debd4ea2a7453944a75ac47f2e01fdd671bd3b0277286ae16d0ef" exitCode=0
Dec 05 18:44:26 crc kubenswrapper[4961]: I1205 18:44:26.671703 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-vl9lg/must-gather-24cfw" event={"ID":"c7e452cb-e813-4aeb-99f2-bbfa334ecdc2","Type":"ContainerDied","Data":"fbba0665853debd4ea2a7453944a75ac47f2e01fdd671bd3b0277286ae16d0ef"}
Dec 05 18:44:26 crc kubenswrapper[4961]: I1205 18:44:26.672880 4961 scope.go:117] "RemoveContainer" containerID="fbba0665853debd4ea2a7453944a75ac47f2e01fdd671bd3b0277286ae16d0ef"
Dec 05 18:44:27 crc kubenswrapper[4961]: I1205 18:44:27.064635 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-vl9lg_must-gather-24cfw_c7e452cb-e813-4aeb-99f2-bbfa334ecdc2/gather/0.log"
Dec 05 18:44:27 crc kubenswrapper[4961]: I1205 18:44:27.246336 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 18:44:27 crc kubenswrapper[4961]: I1205 18:44:27.246414 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 18:44:37 crc kubenswrapper[4961]: I1205 18:44:37.232471 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-vl9lg/must-gather-24cfw"]
Dec 05 18:44:37 crc kubenswrapper[4961]: I1205 18:44:37.233364 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-vl9lg/must-gather-24cfw" podUID="c7e452cb-e813-4aeb-99f2-bbfa334ecdc2" containerName="copy" containerID="cri-o://060db01dc538de0527023d8526589d211ccaa1305b1d783b35fcac34c861786c" gracePeriod=2
Dec 05 18:44:37 crc kubenswrapper[4961]: I1205 18:44:37.246025 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-vl9lg/must-gather-24cfw"]
Dec 05 18:44:37 crc kubenswrapper[4961]: I1205 18:44:37.781208 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-vl9lg_must-gather-24cfw_c7e452cb-e813-4aeb-99f2-bbfa334ecdc2/copy/0.log"
Dec 05 18:44:37 crc kubenswrapper[4961]: I1205 18:44:37.781929 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-vl9lg/must-gather-24cfw"
Dec 05 18:44:37 crc kubenswrapper[4961]: I1205 18:44:37.793189 4961 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-vl9lg_must-gather-24cfw_c7e452cb-e813-4aeb-99f2-bbfa334ecdc2/copy/0.log"
Dec 05 18:44:37 crc kubenswrapper[4961]: I1205 18:44:37.793859 4961 generic.go:334] "Generic (PLEG): container finished" podID="c7e452cb-e813-4aeb-99f2-bbfa334ecdc2" containerID="060db01dc538de0527023d8526589d211ccaa1305b1d783b35fcac34c861786c" exitCode=143
Dec 05 18:44:37 crc kubenswrapper[4961]: I1205 18:44:37.794254 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-vl9lg/must-gather-24cfw"
Dec 05 18:44:37 crc kubenswrapper[4961]: I1205 18:44:37.794385 4961 scope.go:117] "RemoveContainer" containerID="060db01dc538de0527023d8526589d211ccaa1305b1d783b35fcac34c861786c"
Dec 05 18:44:37 crc kubenswrapper[4961]: I1205 18:44:37.816861 4961 scope.go:117] "RemoveContainer" containerID="fbba0665853debd4ea2a7453944a75ac47f2e01fdd671bd3b0277286ae16d0ef"
Dec 05 18:44:37 crc kubenswrapper[4961]: I1205 18:44:37.848549 4961 scope.go:117] "RemoveContainer" containerID="060db01dc538de0527023d8526589d211ccaa1305b1d783b35fcac34c861786c"
Dec 05 18:44:37 crc kubenswrapper[4961]: E1205 18:44:37.850208 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"060db01dc538de0527023d8526589d211ccaa1305b1d783b35fcac34c861786c\": container with ID starting with 060db01dc538de0527023d8526589d211ccaa1305b1d783b35fcac34c861786c not found: ID does not exist" containerID="060db01dc538de0527023d8526589d211ccaa1305b1d783b35fcac34c861786c"
Dec 05 18:44:37 crc kubenswrapper[4961]: I1205 18:44:37.850380 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"060db01dc538de0527023d8526589d211ccaa1305b1d783b35fcac34c861786c"} err="failed to get container status \"060db01dc538de0527023d8526589d211ccaa1305b1d783b35fcac34c861786c\": rpc error: code = NotFound desc = could not find container \"060db01dc538de0527023d8526589d211ccaa1305b1d783b35fcac34c861786c\": container with ID starting with 060db01dc538de0527023d8526589d211ccaa1305b1d783b35fcac34c861786c not found: ID does not exist"
Dec 05 18:44:37 crc kubenswrapper[4961]: I1205 18:44:37.850408 4961 scope.go:117] "RemoveContainer" containerID="fbba0665853debd4ea2a7453944a75ac47f2e01fdd671bd3b0277286ae16d0ef"
Dec 05 18:44:37 crc kubenswrapper[4961]: E1205 18:44:37.851447 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fbba0665853debd4ea2a7453944a75ac47f2e01fdd671bd3b0277286ae16d0ef\": container with ID starting with fbba0665853debd4ea2a7453944a75ac47f2e01fdd671bd3b0277286ae16d0ef not found: ID does not exist" containerID="fbba0665853debd4ea2a7453944a75ac47f2e01fdd671bd3b0277286ae16d0ef"
Dec 05 18:44:37 crc kubenswrapper[4961]: I1205 18:44:37.851477 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fbba0665853debd4ea2a7453944a75ac47f2e01fdd671bd3b0277286ae16d0ef"} err="failed to get container status \"fbba0665853debd4ea2a7453944a75ac47f2e01fdd671bd3b0277286ae16d0ef\": rpc error: code = NotFound desc = could not find container \"fbba0665853debd4ea2a7453944a75ac47f2e01fdd671bd3b0277286ae16d0ef\": container with ID starting with fbba0665853debd4ea2a7453944a75ac47f2e01fdd671bd3b0277286ae16d0ef not found: ID does not exist"
Dec 05 18:44:37 crc kubenswrapper[4961]: I1205 18:44:37.886618 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58mgh\" (UniqueName: \"kubernetes.io/projected/c7e452cb-e813-4aeb-99f2-bbfa334ecdc2-kube-api-access-58mgh\") pod \"c7e452cb-e813-4aeb-99f2-bbfa334ecdc2\" (UID: \"c7e452cb-e813-4aeb-99f2-bbfa334ecdc2\") "
Dec 05 18:44:37 crc kubenswrapper[4961]: I1205 18:44:37.888836 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/c7e452cb-e813-4aeb-99f2-bbfa334ecdc2-must-gather-output\") pod \"c7e452cb-e813-4aeb-99f2-bbfa334ecdc2\" (UID: \"c7e452cb-e813-4aeb-99f2-bbfa334ecdc2\") "
Dec 05 18:44:37 crc kubenswrapper[4961]: I1205 18:44:37.897756 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7e452cb-e813-4aeb-99f2-bbfa334ecdc2-kube-api-access-58mgh" (OuterVolumeSpecName: "kube-api-access-58mgh") pod "c7e452cb-e813-4aeb-99f2-bbfa334ecdc2" (UID: "c7e452cb-e813-4aeb-99f2-bbfa334ecdc2"). InnerVolumeSpecName "kube-api-access-58mgh". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 18:44:37 crc kubenswrapper[4961]: I1205 18:44:37.991370 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58mgh\" (UniqueName: \"kubernetes.io/projected/c7e452cb-e813-4aeb-99f2-bbfa334ecdc2-kube-api-access-58mgh\") on node \"crc\" DevicePath \"\""
Dec 05 18:44:38 crc kubenswrapper[4961]: I1205 18:44:38.078627 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c7e452cb-e813-4aeb-99f2-bbfa334ecdc2-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "c7e452cb-e813-4aeb-99f2-bbfa334ecdc2" (UID: "c7e452cb-e813-4aeb-99f2-bbfa334ecdc2"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Dec 05 18:44:38 crc kubenswrapper[4961]: I1205 18:44:38.096139 4961 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/c7e452cb-e813-4aeb-99f2-bbfa334ecdc2-must-gather-output\") on node \"crc\" DevicePath \"\""
Dec 05 18:44:38 crc kubenswrapper[4961]: I1205 18:44:38.884694 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7e452cb-e813-4aeb-99f2-bbfa334ecdc2" path="/var/lib/kubelet/pods/c7e452cb-e813-4aeb-99f2-bbfa334ecdc2/volumes"
Dec 05 18:44:57 crc kubenswrapper[4961]: I1205 18:44:57.246012 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 18:44:57 crc kubenswrapper[4961]: I1205 18:44:57.246606 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 18:45:00 crc kubenswrapper[4961]: I1205 18:45:00.221094 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416005-mtb2j"]
Dec 05 18:45:00 crc kubenswrapper[4961]: E1205 18:45:00.221978 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7e452cb-e813-4aeb-99f2-bbfa334ecdc2" containerName="gather"
Dec 05 18:45:00 crc kubenswrapper[4961]: I1205 18:45:00.221996 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7e452cb-e813-4aeb-99f2-bbfa334ecdc2" containerName="gather"
Dec 05 18:45:00 crc kubenswrapper[4961]: E1205 18:45:00.222020 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0470f56-267a-433b-8155-9e1fed3d3029" containerName="extract-content"
Dec 05 18:45:00 crc kubenswrapper[4961]: I1205 18:45:00.222027 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0470f56-267a-433b-8155-9e1fed3d3029" containerName="extract-content"
Dec 05 18:45:00 crc kubenswrapper[4961]: E1205 18:45:00.222042 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0470f56-267a-433b-8155-9e1fed3d3029" containerName="extract-utilities"
Dec 05 18:45:00 crc kubenswrapper[4961]: I1205 18:45:00.222049 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0470f56-267a-433b-8155-9e1fed3d3029" containerName="extract-utilities"
Dec 05 18:45:00 crc kubenswrapper[4961]: E1205 18:45:00.222062 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0470f56-267a-433b-8155-9e1fed3d3029" containerName="registry-server"
Dec 05 18:45:00 crc kubenswrapper[4961]: I1205 18:45:00.222068 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0470f56-267a-433b-8155-9e1fed3d3029" containerName="registry-server"
Dec 05 18:45:00 crc kubenswrapper[4961]: E1205 18:45:00.222081 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7e452cb-e813-4aeb-99f2-bbfa334ecdc2" containerName="copy"
Dec 05 18:45:00 crc kubenswrapper[4961]: I1205 18:45:00.222091 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7e452cb-e813-4aeb-99f2-bbfa334ecdc2" containerName="copy"
Dec 05 18:45:00 crc kubenswrapper[4961]: I1205 18:45:00.222325 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7e452cb-e813-4aeb-99f2-bbfa334ecdc2" containerName="copy"
Dec 05 18:45:00 crc kubenswrapper[4961]: I1205 18:45:00.222341 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7e452cb-e813-4aeb-99f2-bbfa334ecdc2" containerName="gather"
Dec 05 18:45:00 crc kubenswrapper[4961]: I1205 18:45:00.222355 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0470f56-267a-433b-8155-9e1fed3d3029" containerName="registry-server"
Dec 05 18:45:00 crc kubenswrapper[4961]: I1205 18:45:00.223323 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416005-mtb2j"
Dec 05 18:45:00 crc kubenswrapper[4961]: I1205 18:45:00.225961 4961 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t"
Dec 05 18:45:00 crc kubenswrapper[4961]: I1205 18:45:00.226268 4961 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config"
Dec 05 18:45:00 crc kubenswrapper[4961]: I1205 18:45:00.235172 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b1ff7313-107c-43e0-b19f-80311417dacf-secret-volume\") pod \"collect-profiles-29416005-mtb2j\" (UID: \"b1ff7313-107c-43e0-b19f-80311417dacf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416005-mtb2j"
Dec 05 18:45:00 crc kubenswrapper[4961]: I1205 18:45:00.235233 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b1ff7313-107c-43e0-b19f-80311417dacf-config-volume\") pod \"collect-profiles-29416005-mtb2j\" (UID: \"b1ff7313-107c-43e0-b19f-80311417dacf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416005-mtb2j"
Dec 05 18:45:00 crc kubenswrapper[4961]: I1205 18:45:00.235581 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xw6pg\" (UniqueName: \"kubernetes.io/projected/b1ff7313-107c-43e0-b19f-80311417dacf-kube-api-access-xw6pg\") pod \"collect-profiles-29416005-mtb2j\" (UID: \"b1ff7313-107c-43e0-b19f-80311417dacf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416005-mtb2j"
Dec 05 18:45:00 crc kubenswrapper[4961]: I1205 18:45:00.241887 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416005-mtb2j"]
Dec 05 18:45:00 crc kubenswrapper[4961]: I1205 18:45:00.337144 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b1ff7313-107c-43e0-b19f-80311417dacf-secret-volume\") pod \"collect-profiles-29416005-mtb2j\" (UID: \"b1ff7313-107c-43e0-b19f-80311417dacf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416005-mtb2j"
Dec 05 18:45:00 crc kubenswrapper[4961]: I1205 18:45:00.337193 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b1ff7313-107c-43e0-b19f-80311417dacf-config-volume\") pod \"collect-profiles-29416005-mtb2j\" (UID: \"b1ff7313-107c-43e0-b19f-80311417dacf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416005-mtb2j"
Dec 05 18:45:00 crc kubenswrapper[4961]: I1205 18:45:00.337284 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xw6pg\" (UniqueName: \"kubernetes.io/projected/b1ff7313-107c-43e0-b19f-80311417dacf-kube-api-access-xw6pg\") pod \"collect-profiles-29416005-mtb2j\" (UID: \"b1ff7313-107c-43e0-b19f-80311417dacf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416005-mtb2j"
Dec 05 18:45:00 crc kubenswrapper[4961]: I1205 18:45:00.338739 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b1ff7313-107c-43e0-b19f-80311417dacf-config-volume\") pod \"collect-profiles-29416005-mtb2j\" (UID: \"b1ff7313-107c-43e0-b19f-80311417dacf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416005-mtb2j"
Dec 05 18:45:00 crc kubenswrapper[4961]: I1205 18:45:00.349491 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b1ff7313-107c-43e0-b19f-80311417dacf-secret-volume\") pod \"collect-profiles-29416005-mtb2j\" (UID: \"b1ff7313-107c-43e0-b19f-80311417dacf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416005-mtb2j"
Dec 05 18:45:00 crc kubenswrapper[4961]: I1205 18:45:00.353877 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xw6pg\" (UniqueName: \"kubernetes.io/projected/b1ff7313-107c-43e0-b19f-80311417dacf-kube-api-access-xw6pg\") pod \"collect-profiles-29416005-mtb2j\" (UID: \"b1ff7313-107c-43e0-b19f-80311417dacf\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29416005-mtb2j"
Dec 05 18:45:00 crc kubenswrapper[4961]: I1205 18:45:00.561762 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416005-mtb2j"
Dec 05 18:45:00 crc kubenswrapper[4961]: I1205 18:45:00.996966 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29416005-mtb2j"]
Dec 05 18:45:01 crc kubenswrapper[4961]: W1205 18:45:01.003426 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb1ff7313_107c_43e0_b19f_80311417dacf.slice/crio-a8fd85ee58bb7603e11b27a64a93529883a1555d699434d59e33c5755a39e9cf WatchSource:0}: Error finding container a8fd85ee58bb7603e11b27a64a93529883a1555d699434d59e33c5755a39e9cf: Status 404 returned error can't find the container with id a8fd85ee58bb7603e11b27a64a93529883a1555d699434d59e33c5755a39e9cf
Dec 05 18:45:01 crc kubenswrapper[4961]: I1205 18:45:01.048632 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416005-mtb2j" event={"ID":"b1ff7313-107c-43e0-b19f-80311417dacf","Type":"ContainerStarted","Data":"a8fd85ee58bb7603e11b27a64a93529883a1555d699434d59e33c5755a39e9cf"}
Dec 05 18:45:02 crc kubenswrapper[4961]: I1205 18:45:02.059325 4961 generic.go:334] "Generic (PLEG): container finished" podID="b1ff7313-107c-43e0-b19f-80311417dacf" containerID="83f1d9d3f15cdf6f1a5b3acc6f218830eea77ac1b701ba7b70320bdac7060a93" exitCode=0
Dec 05 18:45:02 crc kubenswrapper[4961]: I1205 18:45:02.059388 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416005-mtb2j" event={"ID":"b1ff7313-107c-43e0-b19f-80311417dacf","Type":"ContainerDied","Data":"83f1d9d3f15cdf6f1a5b3acc6f218830eea77ac1b701ba7b70320bdac7060a93"}
Dec 05 18:45:03 crc kubenswrapper[4961]: I1205 18:45:03.434192 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416005-mtb2j"
Dec 05 18:45:03 crc kubenswrapper[4961]: I1205 18:45:03.506102 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b1ff7313-107c-43e0-b19f-80311417dacf-secret-volume\") pod \"b1ff7313-107c-43e0-b19f-80311417dacf\" (UID: \"b1ff7313-107c-43e0-b19f-80311417dacf\") "
Dec 05 18:45:03 crc kubenswrapper[4961]: I1205 18:45:03.506157 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b1ff7313-107c-43e0-b19f-80311417dacf-config-volume\") pod \"b1ff7313-107c-43e0-b19f-80311417dacf\" (UID: \"b1ff7313-107c-43e0-b19f-80311417dacf\") "
Dec 05 18:45:03 crc kubenswrapper[4961]: I1205 18:45:03.506228 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xw6pg\" (UniqueName: \"kubernetes.io/projected/b1ff7313-107c-43e0-b19f-80311417dacf-kube-api-access-xw6pg\") pod \"b1ff7313-107c-43e0-b19f-80311417dacf\" (UID: \"b1ff7313-107c-43e0-b19f-80311417dacf\") "
Dec 05 18:45:03 crc kubenswrapper[4961]: I1205 18:45:03.506936 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b1ff7313-107c-43e0-b19f-80311417dacf-config-volume" (OuterVolumeSpecName: "config-volume") pod "b1ff7313-107c-43e0-b19f-80311417dacf" (UID: "b1ff7313-107c-43e0-b19f-80311417dacf"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Dec 05 18:45:03 crc kubenswrapper[4961]: I1205 18:45:03.511839 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b1ff7313-107c-43e0-b19f-80311417dacf-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "b1ff7313-107c-43e0-b19f-80311417dacf" (UID: "b1ff7313-107c-43e0-b19f-80311417dacf"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Dec 05 18:45:03 crc kubenswrapper[4961]: I1205 18:45:03.520527 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b1ff7313-107c-43e0-b19f-80311417dacf-kube-api-access-xw6pg" (OuterVolumeSpecName: "kube-api-access-xw6pg") pod "b1ff7313-107c-43e0-b19f-80311417dacf" (UID: "b1ff7313-107c-43e0-b19f-80311417dacf"). InnerVolumeSpecName "kube-api-access-xw6pg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Dec 05 18:45:03 crc kubenswrapper[4961]: I1205 18:45:03.608495 4961 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/b1ff7313-107c-43e0-b19f-80311417dacf-secret-volume\") on node \"crc\" DevicePath \"\""
Dec 05 18:45:03 crc kubenswrapper[4961]: I1205 18:45:03.608541 4961 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b1ff7313-107c-43e0-b19f-80311417dacf-config-volume\") on node \"crc\" DevicePath \"\""
Dec 05 18:45:03 crc kubenswrapper[4961]: I1205 18:45:03.608559 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xw6pg\" (UniqueName: \"kubernetes.io/projected/b1ff7313-107c-43e0-b19f-80311417dacf-kube-api-access-xw6pg\") on node \"crc\" DevicePath \"\""
Dec 05 18:45:04 crc kubenswrapper[4961]: I1205 18:45:04.083474 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29416005-mtb2j" event={"ID":"b1ff7313-107c-43e0-b19f-80311417dacf","Type":"ContainerDied","Data":"a8fd85ee58bb7603e11b27a64a93529883a1555d699434d59e33c5755a39e9cf"}
Dec 05 18:45:04 crc kubenswrapper[4961]: I1205 18:45:04.083888 4961 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a8fd85ee58bb7603e11b27a64a93529883a1555d699434d59e33c5755a39e9cf"
Dec 05 18:45:04 crc kubenswrapper[4961]: I1205 18:45:04.083579 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29416005-mtb2j"
Dec 05 18:45:04 crc kubenswrapper[4961]: I1205 18:45:04.531982 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415960-7rgnh"]
Dec 05 18:45:04 crc kubenswrapper[4961]: I1205 18:45:04.545123 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29415960-7rgnh"]
Dec 05 18:45:04 crc kubenswrapper[4961]: I1205 18:45:04.882285 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ceb51243-ac57-4363-b900-2f493c6a526a" path="/var/lib/kubelet/pods/ceb51243-ac57-4363-b900-2f493c6a526a/volumes"
Dec 05 18:45:27 crc kubenswrapper[4961]: I1205 18:45:27.245232 4961 patch_prober.go:28] interesting pod/machine-config-daemon-4vc27 container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Dec 05 18:45:27 crc kubenswrapper[4961]: I1205 18:45:27.245830 4961 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Dec 05 18:45:27 crc kubenswrapper[4961]: I1205 18:45:27.245879 4961 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-4vc27"
Dec 05 18:45:27 crc kubenswrapper[4961]: I1205 18:45:27.246576 4961 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"4ede08b8bde2382d630f94de033c71c18a6c1b31a25c85ecd02cbd8429e97b32"} pod="openshift-machine-config-operator/machine-config-daemon-4vc27" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Dec 05 18:45:27 crc kubenswrapper[4961]: I1205 18:45:27.246621 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" containerName="machine-config-daemon" containerID="cri-o://4ede08b8bde2382d630f94de033c71c18a6c1b31a25c85ecd02cbd8429e97b32" gracePeriod=600
Dec 05 18:45:27 crc kubenswrapper[4961]: E1205 18:45:27.375142 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:45:28 crc kubenswrapper[4961]: I1205 18:45:28.323990 4961 generic.go:334] "Generic (PLEG): container finished" podID="c048c267-061b-479b-9d63-b3aee093d9f6" containerID="4ede08b8bde2382d630f94de033c71c18a6c1b31a25c85ecd02cbd8429e97b32" exitCode=0
Dec 05 18:45:28 crc kubenswrapper[4961]: I1205 18:45:28.324045 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" event={"ID":"c048c267-061b-479b-9d63-b3aee093d9f6","Type":"ContainerDied","Data":"4ede08b8bde2382d630f94de033c71c18a6c1b31a25c85ecd02cbd8429e97b32"}
Dec 05 18:45:28 crc kubenswrapper[4961]: I1205 18:45:28.324120 4961 scope.go:117] "RemoveContainer" containerID="f1a110177a9eeba85f570ef0c42f98bf4014c9fcc0be0c4a67f6cf96d7337c8c"
Dec 05 18:45:28 crc kubenswrapper[4961]: I1205 18:45:28.324953 4961 scope.go:117] "RemoveContainer" containerID="4ede08b8bde2382d630f94de033c71c18a6c1b31a25c85ecd02cbd8429e97b32"
Dec 05 18:45:28 crc kubenswrapper[4961]: E1205 18:45:28.325186 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:45:41 crc kubenswrapper[4961]: I1205 18:45:41.863703 4961 scope.go:117] "RemoveContainer" containerID="4ede08b8bde2382d630f94de033c71c18a6c1b31a25c85ecd02cbd8429e97b32"
Dec 05 18:45:41 crc kubenswrapper[4961]: E1205 18:45:41.864439 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:45:46 crc kubenswrapper[4961]: I1205 18:45:46.009607 4961 scope.go:117] "RemoveContainer" containerID="b82863ff3c4580f3e601ebcb8588270111e76323a0d3aef7d32b20891ba8dc56"
Dec 05 18:45:46 crc kubenswrapper[4961]: I1205 18:45:46.044748 4961 scope.go:117] "RemoveContainer" containerID="914cc5ec897e30f8173e466106c4a3524d3dbb65e5bc2dfbde6cf03661ab3d5c"
Dec 05 18:45:56 crc kubenswrapper[4961]: I1205 18:45:56.863897 4961 scope.go:117] "RemoveContainer" containerID="4ede08b8bde2382d630f94de033c71c18a6c1b31a25c85ecd02cbd8429e97b32"
Dec 05 18:45:56 crc kubenswrapper[4961]: E1205 18:45:56.864919 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:46:07 crc kubenswrapper[4961]: I1205 18:46:07.863502 4961 scope.go:117] "RemoveContainer" containerID="4ede08b8bde2382d630f94de033c71c18a6c1b31a25c85ecd02cbd8429e97b32"
Dec 05 18:46:07 crc kubenswrapper[4961]: E1205 18:46:07.864342 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:46:20 crc kubenswrapper[4961]: I1205 18:46:20.876232 4961 scope.go:117] "RemoveContainer" containerID="4ede08b8bde2382d630f94de033c71c18a6c1b31a25c85ecd02cbd8429e97b32"
Dec 05 18:46:20 crc kubenswrapper[4961]: E1205 18:46:20.877269 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:46:34 crc kubenswrapper[4961]: I1205 18:46:34.873717 4961 scope.go:117] "RemoveContainer" containerID="4ede08b8bde2382d630f94de033c71c18a6c1b31a25c85ecd02cbd8429e97b32"
Dec 05 18:46:34 crc kubenswrapper[4961]: E1205 18:46:34.874668 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6"
Dec 05 18:46:41 crc kubenswrapper[4961]: I1205 18:46:41.443213 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-jwmbf"]
Dec 05 18:46:41 crc kubenswrapper[4961]: E1205 18:46:41.444444 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b1ff7313-107c-43e0-b19f-80311417dacf" containerName="collect-profiles"
Dec 05 18:46:41 crc kubenswrapper[4961]: I1205 18:46:41.444466 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="b1ff7313-107c-43e0-b19f-80311417dacf" containerName="collect-profiles"
Dec 05 18:46:41 crc kubenswrapper[4961]: I1205 18:46:41.444739 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="b1ff7313-107c-43e0-b19f-80311417dacf" containerName="collect-profiles"
Dec 05 18:46:41 crc kubenswrapper[4961]: I1205 18:46:41.447008 4961 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="openshift-marketplace/certified-operators-jwmbf" Dec 05 18:46:41 crc kubenswrapper[4961]: I1205 18:46:41.459139 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jwmbf"] Dec 05 18:46:41 crc kubenswrapper[4961]: I1205 18:46:41.558889 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kr8bv\" (UniqueName: \"kubernetes.io/projected/e822927a-602f-4c8f-b2ba-78fc147e9128-kube-api-access-kr8bv\") pod \"certified-operators-jwmbf\" (UID: \"e822927a-602f-4c8f-b2ba-78fc147e9128\") " pod="openshift-marketplace/certified-operators-jwmbf" Dec 05 18:46:41 crc kubenswrapper[4961]: I1205 18:46:41.559023 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e822927a-602f-4c8f-b2ba-78fc147e9128-utilities\") pod \"certified-operators-jwmbf\" (UID: \"e822927a-602f-4c8f-b2ba-78fc147e9128\") " pod="openshift-marketplace/certified-operators-jwmbf" Dec 05 18:46:41 crc kubenswrapper[4961]: I1205 18:46:41.559369 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e822927a-602f-4c8f-b2ba-78fc147e9128-catalog-content\") pod \"certified-operators-jwmbf\" (UID: \"e822927a-602f-4c8f-b2ba-78fc147e9128\") " pod="openshift-marketplace/certified-operators-jwmbf" Dec 05 18:46:41 crc kubenswrapper[4961]: I1205 18:46:41.661272 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e822927a-602f-4c8f-b2ba-78fc147e9128-utilities\") pod \"certified-operators-jwmbf\" (UID: \"e822927a-602f-4c8f-b2ba-78fc147e9128\") " pod="openshift-marketplace/certified-operators-jwmbf" Dec 05 18:46:41 crc kubenswrapper[4961]: I1205 18:46:41.661384 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e822927a-602f-4c8f-b2ba-78fc147e9128-catalog-content\") pod \"certified-operators-jwmbf\" (UID: \"e822927a-602f-4c8f-b2ba-78fc147e9128\") " pod="openshift-marketplace/certified-operators-jwmbf" Dec 05 18:46:41 crc kubenswrapper[4961]: I1205 18:46:41.661506 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kr8bv\" (UniqueName: \"kubernetes.io/projected/e822927a-602f-4c8f-b2ba-78fc147e9128-kube-api-access-kr8bv\") pod \"certified-operators-jwmbf\" (UID: \"e822927a-602f-4c8f-b2ba-78fc147e9128\") " pod="openshift-marketplace/certified-operators-jwmbf" Dec 05 18:46:41 crc kubenswrapper[4961]: I1205 18:46:41.662058 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e822927a-602f-4c8f-b2ba-78fc147e9128-utilities\") pod \"certified-operators-jwmbf\" (UID: \"e822927a-602f-4c8f-b2ba-78fc147e9128\") " pod="openshift-marketplace/certified-operators-jwmbf" Dec 05 18:46:41 crc kubenswrapper[4961]: I1205 18:46:41.662075 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e822927a-602f-4c8f-b2ba-78fc147e9128-catalog-content\") pod \"certified-operators-jwmbf\" (UID: \"e822927a-602f-4c8f-b2ba-78fc147e9128\") " pod="openshift-marketplace/certified-operators-jwmbf" Dec 05 18:46:41 crc kubenswrapper[4961]: I1205 18:46:41.684357 4961 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-kr8bv\" (UniqueName: \"kubernetes.io/projected/e822927a-602f-4c8f-b2ba-78fc147e9128-kube-api-access-kr8bv\") pod \"certified-operators-jwmbf\" (UID: \"e822927a-602f-4c8f-b2ba-78fc147e9128\") " pod="openshift-marketplace/certified-operators-jwmbf" Dec 05 18:46:41 crc kubenswrapper[4961]: I1205 18:46:41.784532 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-jwmbf" Dec 05 18:46:42 crc kubenswrapper[4961]: I1205 18:46:42.323655 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-jwmbf"] Dec 05 18:46:43 crc kubenswrapper[4961]: I1205 18:46:43.148117 4961 generic.go:334] "Generic (PLEG): container finished" podID="e822927a-602f-4c8f-b2ba-78fc147e9128" containerID="4540237c2930feef92ef4a34b369acca91efcec2325854764541c68a744bed95" exitCode=0 Dec 05 18:46:43 crc kubenswrapper[4961]: I1205 18:46:43.148173 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jwmbf" event={"ID":"e822927a-602f-4c8f-b2ba-78fc147e9128","Type":"ContainerDied","Data":"4540237c2930feef92ef4a34b369acca91efcec2325854764541c68a744bed95"} Dec 05 18:46:43 crc kubenswrapper[4961]: I1205 18:46:43.148403 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jwmbf" event={"ID":"e822927a-602f-4c8f-b2ba-78fc147e9128","Type":"ContainerStarted","Data":"d0bd5d479a00cf5ff6deecdf0718a331aee22ab67266a74deea035a3e861db01"} Dec 05 18:46:44 crc kubenswrapper[4961]: I1205 18:46:44.161869 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jwmbf" event={"ID":"e822927a-602f-4c8f-b2ba-78fc147e9128","Type":"ContainerStarted","Data":"936a90bc68d3f3af3c4eab459da71c7cbc8fdadd8d4747e746e89c70c92e2355"} Dec 05 18:46:45 crc kubenswrapper[4961]: I1205 18:46:45.175608 4961 generic.go:334] "Generic (PLEG): container finished" podID="e822927a-602f-4c8f-b2ba-78fc147e9128" containerID="936a90bc68d3f3af3c4eab459da71c7cbc8fdadd8d4747e746e89c70c92e2355" exitCode=0 Dec 05 18:46:45 crc kubenswrapper[4961]: I1205 18:46:45.175648 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jwmbf" event={"ID":"e822927a-602f-4c8f-b2ba-78fc147e9128","Type":"ContainerDied","Data":"936a90bc68d3f3af3c4eab459da71c7cbc8fdadd8d4747e746e89c70c92e2355"} Dec 05 18:46:46 crc kubenswrapper[4961]: I1205 18:46:46.125175 4961 scope.go:117] "RemoveContainer" containerID="3adc0870759e73be2c9c7a9ebaef101d5b6bcefcd45789a0171f5ac2f1faa299" Dec 05 18:46:46 crc kubenswrapper[4961]: I1205 18:46:46.191725 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jwmbf" event={"ID":"e822927a-602f-4c8f-b2ba-78fc147e9128","Type":"ContainerStarted","Data":"89e59873d7837bb3a63ff885c0b7acf8dea92e8cf5353071f6fe8e981e79a0ee"} Dec 05 18:46:46 crc kubenswrapper[4961]: I1205 18:46:46.238041 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-jwmbf" podStartSLOduration=2.799071399 podStartE2EDuration="5.237975459s" podCreationTimestamp="2025-12-05 18:46:41 +0000 UTC" firstStartedPulling="2025-12-05 18:46:43.151377005 +0000 UTC m=+4409.212527528" lastFinishedPulling="2025-12-05 18:46:45.590281105 +0000 UTC m=+4411.651431588" observedRunningTime="2025-12-05 18:46:46.219838246 +0000 UTC m=+4412.280988759" 
watchObservedRunningTime="2025-12-05 18:46:46.237975459 +0000 UTC m=+4412.299125992" Dec 05 18:46:47 crc kubenswrapper[4961]: I1205 18:46:47.864077 4961 scope.go:117] "RemoveContainer" containerID="4ede08b8bde2382d630f94de033c71c18a6c1b31a25c85ecd02cbd8429e97b32" Dec 05 18:46:47 crc kubenswrapper[4961]: E1205 18:46:47.864631 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:46:51 crc kubenswrapper[4961]: I1205 18:46:51.786087 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-jwmbf" Dec 05 18:46:51 crc kubenswrapper[4961]: I1205 18:46:51.787161 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-jwmbf" Dec 05 18:46:51 crc kubenswrapper[4961]: I1205 18:46:51.873138 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-jwmbf" Dec 05 18:46:52 crc kubenswrapper[4961]: I1205 18:46:52.335173 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-jwmbf" Dec 05 18:46:52 crc kubenswrapper[4961]: I1205 18:46:52.396960 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jwmbf"] Dec 05 18:46:54 crc kubenswrapper[4961]: I1205 18:46:54.275830 4961 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-jwmbf" podUID="e822927a-602f-4c8f-b2ba-78fc147e9128" containerName="registry-server" containerID="cri-o://89e59873d7837bb3a63ff885c0b7acf8dea92e8cf5353071f6fe8e981e79a0ee" gracePeriod=2 Dec 05 18:46:55 crc kubenswrapper[4961]: I1205 18:46:54.800040 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jwmbf" Dec 05 18:46:55 crc kubenswrapper[4961]: I1205 18:46:54.861984 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kr8bv\" (UniqueName: \"kubernetes.io/projected/e822927a-602f-4c8f-b2ba-78fc147e9128-kube-api-access-kr8bv\") pod \"e822927a-602f-4c8f-b2ba-78fc147e9128\" (UID: \"e822927a-602f-4c8f-b2ba-78fc147e9128\") " Dec 05 18:46:55 crc kubenswrapper[4961]: I1205 18:46:54.862343 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e822927a-602f-4c8f-b2ba-78fc147e9128-utilities\") pod \"e822927a-602f-4c8f-b2ba-78fc147e9128\" (UID: \"e822927a-602f-4c8f-b2ba-78fc147e9128\") " Dec 05 18:46:55 crc kubenswrapper[4961]: I1205 18:46:54.862421 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e822927a-602f-4c8f-b2ba-78fc147e9128-catalog-content\") pod \"e822927a-602f-4c8f-b2ba-78fc147e9128\" (UID: \"e822927a-602f-4c8f-b2ba-78fc147e9128\") " Dec 05 18:46:55 crc kubenswrapper[4961]: I1205 18:46:54.863267 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e822927a-602f-4c8f-b2ba-78fc147e9128-utilities" (OuterVolumeSpecName: "utilities") pod "e822927a-602f-4c8f-b2ba-78fc147e9128" (UID: "e822927a-602f-4c8f-b2ba-78fc147e9128"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:46:55 crc kubenswrapper[4961]: I1205 18:46:54.863530 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e822927a-602f-4c8f-b2ba-78fc147e9128-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 18:46:55 crc kubenswrapper[4961]: I1205 18:46:54.868855 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e822927a-602f-4c8f-b2ba-78fc147e9128-kube-api-access-kr8bv" (OuterVolumeSpecName: "kube-api-access-kr8bv") pod "e822927a-602f-4c8f-b2ba-78fc147e9128" (UID: "e822927a-602f-4c8f-b2ba-78fc147e9128"). InnerVolumeSpecName "kube-api-access-kr8bv". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:46:55 crc kubenswrapper[4961]: I1205 18:46:54.919820 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e822927a-602f-4c8f-b2ba-78fc147e9128-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e822927a-602f-4c8f-b2ba-78fc147e9128" (UID: "e822927a-602f-4c8f-b2ba-78fc147e9128"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:46:55 crc kubenswrapper[4961]: I1205 18:46:54.966606 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e822927a-602f-4c8f-b2ba-78fc147e9128-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 18:46:55 crc kubenswrapper[4961]: I1205 18:46:54.966641 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kr8bv\" (UniqueName: \"kubernetes.io/projected/e822927a-602f-4c8f-b2ba-78fc147e9128-kube-api-access-kr8bv\") on node \"crc\" DevicePath \"\"" Dec 05 18:46:55 crc kubenswrapper[4961]: I1205 18:46:55.295741 4961 generic.go:334] "Generic (PLEG): container finished" podID="e822927a-602f-4c8f-b2ba-78fc147e9128" containerID="89e59873d7837bb3a63ff885c0b7acf8dea92e8cf5353071f6fe8e981e79a0ee" exitCode=0 Dec 05 18:46:55 crc kubenswrapper[4961]: I1205 18:46:55.295812 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jwmbf" event={"ID":"e822927a-602f-4c8f-b2ba-78fc147e9128","Type":"ContainerDied","Data":"89e59873d7837bb3a63ff885c0b7acf8dea92e8cf5353071f6fe8e981e79a0ee"} Dec 05 18:46:55 crc kubenswrapper[4961]: I1205 18:46:55.295852 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-jwmbf" event={"ID":"e822927a-602f-4c8f-b2ba-78fc147e9128","Type":"ContainerDied","Data":"d0bd5d479a00cf5ff6deecdf0718a331aee22ab67266a74deea035a3e861db01"} Dec 05 18:46:55 crc kubenswrapper[4961]: I1205 18:46:55.295874 4961 scope.go:117] "RemoveContainer" containerID="89e59873d7837bb3a63ff885c0b7acf8dea92e8cf5353071f6fe8e981e79a0ee" Dec 05 18:46:55 crc kubenswrapper[4961]: I1205 18:46:55.295881 4961 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-jwmbf" Dec 05 18:46:55 crc kubenswrapper[4961]: I1205 18:46:55.320677 4961 scope.go:117] "RemoveContainer" containerID="936a90bc68d3f3af3c4eab459da71c7cbc8fdadd8d4747e746e89c70c92e2355" Dec 05 18:46:55 crc kubenswrapper[4961]: I1205 18:46:55.360113 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-jwmbf"] Dec 05 18:46:55 crc kubenswrapper[4961]: I1205 18:46:55.372518 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-jwmbf"] Dec 05 18:46:55 crc kubenswrapper[4961]: I1205 18:46:55.381457 4961 scope.go:117] "RemoveContainer" containerID="4540237c2930feef92ef4a34b369acca91efcec2325854764541c68a744bed95" Dec 05 18:46:55 crc kubenswrapper[4961]: I1205 18:46:55.403761 4961 scope.go:117] "RemoveContainer" containerID="89e59873d7837bb3a63ff885c0b7acf8dea92e8cf5353071f6fe8e981e79a0ee" Dec 05 18:46:55 crc kubenswrapper[4961]: E1205 18:46:55.404498 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"89e59873d7837bb3a63ff885c0b7acf8dea92e8cf5353071f6fe8e981e79a0ee\": container with ID starting with 89e59873d7837bb3a63ff885c0b7acf8dea92e8cf5353071f6fe8e981e79a0ee not found: ID does not exist" containerID="89e59873d7837bb3a63ff885c0b7acf8dea92e8cf5353071f6fe8e981e79a0ee" Dec 05 18:46:55 crc kubenswrapper[4961]: I1205 18:46:55.404553 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"89e59873d7837bb3a63ff885c0b7acf8dea92e8cf5353071f6fe8e981e79a0ee"} err="failed to get container status \"89e59873d7837bb3a63ff885c0b7acf8dea92e8cf5353071f6fe8e981e79a0ee\": rpc error: code = NotFound desc = could not find container \"89e59873d7837bb3a63ff885c0b7acf8dea92e8cf5353071f6fe8e981e79a0ee\": container with ID starting with 89e59873d7837bb3a63ff885c0b7acf8dea92e8cf5353071f6fe8e981e79a0ee not found: ID does not exist" Dec 05 18:46:55 crc kubenswrapper[4961]: I1205 18:46:55.404651 4961 scope.go:117] "RemoveContainer" containerID="936a90bc68d3f3af3c4eab459da71c7cbc8fdadd8d4747e746e89c70c92e2355" Dec 05 18:46:55 crc kubenswrapper[4961]: E1205 18:46:55.405117 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"936a90bc68d3f3af3c4eab459da71c7cbc8fdadd8d4747e746e89c70c92e2355\": container with ID starting with 936a90bc68d3f3af3c4eab459da71c7cbc8fdadd8d4747e746e89c70c92e2355 not found: ID does not exist" containerID="936a90bc68d3f3af3c4eab459da71c7cbc8fdadd8d4747e746e89c70c92e2355" Dec 05 18:46:55 crc kubenswrapper[4961]: I1205 18:46:55.405155 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"936a90bc68d3f3af3c4eab459da71c7cbc8fdadd8d4747e746e89c70c92e2355"} err="failed to get container status \"936a90bc68d3f3af3c4eab459da71c7cbc8fdadd8d4747e746e89c70c92e2355\": rpc error: code = NotFound desc = could not find container \"936a90bc68d3f3af3c4eab459da71c7cbc8fdadd8d4747e746e89c70c92e2355\": container with ID starting with 936a90bc68d3f3af3c4eab459da71c7cbc8fdadd8d4747e746e89c70c92e2355 not found: ID does not exist" Dec 05 18:46:55 crc kubenswrapper[4961]: I1205 18:46:55.405181 4961 scope.go:117] "RemoveContainer" containerID="4540237c2930feef92ef4a34b369acca91efcec2325854764541c68a744bed95" Dec 05 18:46:55 crc kubenswrapper[4961]: E1205 18:46:55.405619 4961 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"4540237c2930feef92ef4a34b369acca91efcec2325854764541c68a744bed95\": container with ID starting with 4540237c2930feef92ef4a34b369acca91efcec2325854764541c68a744bed95 not found: ID does not exist" containerID="4540237c2930feef92ef4a34b369acca91efcec2325854764541c68a744bed95" Dec 05 18:46:55 crc kubenswrapper[4961]: I1205 18:46:55.405679 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4540237c2930feef92ef4a34b369acca91efcec2325854764541c68a744bed95"} err="failed to get container status \"4540237c2930feef92ef4a34b369acca91efcec2325854764541c68a744bed95\": rpc error: code = NotFound desc = could not find container \"4540237c2930feef92ef4a34b369acca91efcec2325854764541c68a744bed95\": container with ID starting with 4540237c2930feef92ef4a34b369acca91efcec2325854764541c68a744bed95 not found: ID does not exist" Dec 05 18:46:56 crc kubenswrapper[4961]: I1205 18:46:56.885541 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e822927a-602f-4c8f-b2ba-78fc147e9128" path="/var/lib/kubelet/pods/e822927a-602f-4c8f-b2ba-78fc147e9128/volumes" Dec 05 18:46:59 crc kubenswrapper[4961]: I1205 18:46:59.863637 4961 scope.go:117] "RemoveContainer" containerID="4ede08b8bde2382d630f94de033c71c18a6c1b31a25c85ecd02cbd8429e97b32" Dec 05 18:46:59 crc kubenswrapper[4961]: E1205 18:46:59.864319 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:47:14 crc kubenswrapper[4961]: I1205 18:47:14.879249 4961 scope.go:117] "RemoveContainer" containerID="4ede08b8bde2382d630f94de033c71c18a6c1b31a25c85ecd02cbd8429e97b32" Dec 05 18:47:14 crc kubenswrapper[4961]: E1205 18:47:14.880689 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:47:25 crc kubenswrapper[4961]: I1205 18:47:25.863960 4961 scope.go:117] "RemoveContainer" containerID="4ede08b8bde2382d630f94de033c71c18a6c1b31a25c85ecd02cbd8429e97b32" Dec 05 18:47:25 crc kubenswrapper[4961]: E1205 18:47:25.870540 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:47:32 crc kubenswrapper[4961]: I1205 18:47:32.405519 4961 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-29kr9"] Dec 05 18:47:32 crc kubenswrapper[4961]: E1205 18:47:32.406689 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e822927a-602f-4c8f-b2ba-78fc147e9128" 
containerName="extract-utilities" Dec 05 18:47:32 crc kubenswrapper[4961]: I1205 18:47:32.406711 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="e822927a-602f-4c8f-b2ba-78fc147e9128" containerName="extract-utilities" Dec 05 18:47:32 crc kubenswrapper[4961]: E1205 18:47:32.406743 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e822927a-602f-4c8f-b2ba-78fc147e9128" containerName="extract-content" Dec 05 18:47:32 crc kubenswrapper[4961]: I1205 18:47:32.406756 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="e822927a-602f-4c8f-b2ba-78fc147e9128" containerName="extract-content" Dec 05 18:47:32 crc kubenswrapper[4961]: E1205 18:47:32.406844 4961 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e822927a-602f-4c8f-b2ba-78fc147e9128" containerName="registry-server" Dec 05 18:47:32 crc kubenswrapper[4961]: I1205 18:47:32.406860 4961 state_mem.go:107] "Deleted CPUSet assignment" podUID="e822927a-602f-4c8f-b2ba-78fc147e9128" containerName="registry-server" Dec 05 18:47:32 crc kubenswrapper[4961]: I1205 18:47:32.407189 4961 memory_manager.go:354] "RemoveStaleState removing state" podUID="e822927a-602f-4c8f-b2ba-78fc147e9128" containerName="registry-server" Dec 05 18:47:32 crc kubenswrapper[4961]: I1205 18:47:32.409690 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-29kr9" Dec 05 18:47:32 crc kubenswrapper[4961]: I1205 18:47:32.425647 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-29kr9"] Dec 05 18:47:32 crc kubenswrapper[4961]: I1205 18:47:32.507760 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xpjx8\" (UniqueName: \"kubernetes.io/projected/fb54cb56-8b81-426a-820e-a4fcbf4bcb1e-kube-api-access-xpjx8\") pod \"community-operators-29kr9\" (UID: \"fb54cb56-8b81-426a-820e-a4fcbf4bcb1e\") " pod="openshift-marketplace/community-operators-29kr9" Dec 05 18:47:32 crc kubenswrapper[4961]: I1205 18:47:32.507867 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb54cb56-8b81-426a-820e-a4fcbf4bcb1e-utilities\") pod \"community-operators-29kr9\" (UID: \"fb54cb56-8b81-426a-820e-a4fcbf4bcb1e\") " pod="openshift-marketplace/community-operators-29kr9" Dec 05 18:47:32 crc kubenswrapper[4961]: I1205 18:47:32.507921 4961 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb54cb56-8b81-426a-820e-a4fcbf4bcb1e-catalog-content\") pod \"community-operators-29kr9\" (UID: \"fb54cb56-8b81-426a-820e-a4fcbf4bcb1e\") " pod="openshift-marketplace/community-operators-29kr9" Dec 05 18:47:32 crc kubenswrapper[4961]: I1205 18:47:32.609352 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb54cb56-8b81-426a-820e-a4fcbf4bcb1e-catalog-content\") pod \"community-operators-29kr9\" (UID: \"fb54cb56-8b81-426a-820e-a4fcbf4bcb1e\") " pod="openshift-marketplace/community-operators-29kr9" Dec 05 18:47:32 crc kubenswrapper[4961]: I1205 18:47:32.609620 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xpjx8\" (UniqueName: \"kubernetes.io/projected/fb54cb56-8b81-426a-820e-a4fcbf4bcb1e-kube-api-access-xpjx8\") pod \"community-operators-29kr9\" (UID: 
\"fb54cb56-8b81-426a-820e-a4fcbf4bcb1e\") " pod="openshift-marketplace/community-operators-29kr9" Dec 05 18:47:32 crc kubenswrapper[4961]: I1205 18:47:32.609678 4961 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb54cb56-8b81-426a-820e-a4fcbf4bcb1e-utilities\") pod \"community-operators-29kr9\" (UID: \"fb54cb56-8b81-426a-820e-a4fcbf4bcb1e\") " pod="openshift-marketplace/community-operators-29kr9" Dec 05 18:47:32 crc kubenswrapper[4961]: I1205 18:47:32.610417 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb54cb56-8b81-426a-820e-a4fcbf4bcb1e-catalog-content\") pod \"community-operators-29kr9\" (UID: \"fb54cb56-8b81-426a-820e-a4fcbf4bcb1e\") " pod="openshift-marketplace/community-operators-29kr9" Dec 05 18:47:32 crc kubenswrapper[4961]: I1205 18:47:32.610493 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb54cb56-8b81-426a-820e-a4fcbf4bcb1e-utilities\") pod \"community-operators-29kr9\" (UID: \"fb54cb56-8b81-426a-820e-a4fcbf4bcb1e\") " pod="openshift-marketplace/community-operators-29kr9" Dec 05 18:47:32 crc kubenswrapper[4961]: I1205 18:47:32.638908 4961 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xpjx8\" (UniqueName: \"kubernetes.io/projected/fb54cb56-8b81-426a-820e-a4fcbf4bcb1e-kube-api-access-xpjx8\") pod \"community-operators-29kr9\" (UID: \"fb54cb56-8b81-426a-820e-a4fcbf4bcb1e\") " pod="openshift-marketplace/community-operators-29kr9" Dec 05 18:47:32 crc kubenswrapper[4961]: I1205 18:47:32.746288 4961 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-29kr9" Dec 05 18:47:33 crc kubenswrapper[4961]: I1205 18:47:33.234352 4961 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-29kr9"] Dec 05 18:47:33 crc kubenswrapper[4961]: W1205 18:47:33.239544 4961 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podfb54cb56_8b81_426a_820e_a4fcbf4bcb1e.slice/crio-733c88cb80c3b7ab422bf61287f9d75fdbb728c5e63c519daeadb0a51dfb2ac5 WatchSource:0}: Error finding container 733c88cb80c3b7ab422bf61287f9d75fdbb728c5e63c519daeadb0a51dfb2ac5: Status 404 returned error can't find the container with id 733c88cb80c3b7ab422bf61287f9d75fdbb728c5e63c519daeadb0a51dfb2ac5 Dec 05 18:47:33 crc kubenswrapper[4961]: I1205 18:47:33.684933 4961 generic.go:334] "Generic (PLEG): container finished" podID="fb54cb56-8b81-426a-820e-a4fcbf4bcb1e" containerID="418793103bbaf0bc4e0e507129798a0bced45eda620a2712dc15193fbd23ea95" exitCode=0 Dec 05 18:47:33 crc kubenswrapper[4961]: I1205 18:47:33.684975 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-29kr9" event={"ID":"fb54cb56-8b81-426a-820e-a4fcbf4bcb1e","Type":"ContainerDied","Data":"418793103bbaf0bc4e0e507129798a0bced45eda620a2712dc15193fbd23ea95"} Dec 05 18:47:33 crc kubenswrapper[4961]: I1205 18:47:33.684999 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-29kr9" event={"ID":"fb54cb56-8b81-426a-820e-a4fcbf4bcb1e","Type":"ContainerStarted","Data":"733c88cb80c3b7ab422bf61287f9d75fdbb728c5e63c519daeadb0a51dfb2ac5"} Dec 05 18:47:34 crc kubenswrapper[4961]: I1205 18:47:34.712848 4961 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/community-operators-29kr9" event={"ID":"fb54cb56-8b81-426a-820e-a4fcbf4bcb1e","Type":"ContainerStarted","Data":"51bbc8c71872a6e8423307c523f00258ca96e55beab9583d497cb6121daa0b71"} Dec 05 18:47:35 crc kubenswrapper[4961]: I1205 18:47:35.727125 4961 generic.go:334] "Generic (PLEG): container finished" podID="fb54cb56-8b81-426a-820e-a4fcbf4bcb1e" containerID="51bbc8c71872a6e8423307c523f00258ca96e55beab9583d497cb6121daa0b71" exitCode=0 Dec 05 18:47:35 crc kubenswrapper[4961]: I1205 18:47:35.728208 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-29kr9" event={"ID":"fb54cb56-8b81-426a-820e-a4fcbf4bcb1e","Type":"ContainerDied","Data":"51bbc8c71872a6e8423307c523f00258ca96e55beab9583d497cb6121daa0b71"} Dec 05 18:47:36 crc kubenswrapper[4961]: I1205 18:47:36.755552 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-29kr9" event={"ID":"fb54cb56-8b81-426a-820e-a4fcbf4bcb1e","Type":"ContainerStarted","Data":"327f512d66bc7ab80b8e41a2f5da84a2215cacda4d6b689b80a526e935834e7e"} Dec 05 18:47:36 crc kubenswrapper[4961]: I1205 18:47:36.778754 4961 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-29kr9" podStartSLOduration=2.336005049 podStartE2EDuration="4.778729183s" podCreationTimestamp="2025-12-05 18:47:32 +0000 UTC" firstStartedPulling="2025-12-05 18:47:33.687366182 +0000 UTC m=+4459.748516655" lastFinishedPulling="2025-12-05 18:47:36.130090316 +0000 UTC m=+4462.191240789" observedRunningTime="2025-12-05 18:47:36.777048122 +0000 UTC m=+4462.838198615" watchObservedRunningTime="2025-12-05 18:47:36.778729183 +0000 UTC m=+4462.839879656" Dec 05 18:47:37 crc kubenswrapper[4961]: I1205 18:47:37.863880 4961 scope.go:117] "RemoveContainer" containerID="4ede08b8bde2382d630f94de033c71c18a6c1b31a25c85ecd02cbd8429e97b32" Dec 05 18:47:37 crc kubenswrapper[4961]: E1205 18:47:37.864525 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:47:42 crc kubenswrapper[4961]: I1205 18:47:42.746412 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-29kr9" Dec 05 18:47:42 crc kubenswrapper[4961]: I1205 18:47:42.746933 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-29kr9" Dec 05 18:47:42 crc kubenswrapper[4961]: I1205 18:47:42.832900 4961 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-29kr9" Dec 05 18:47:42 crc kubenswrapper[4961]: I1205 18:47:42.891274 4961 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-29kr9" Dec 05 18:47:43 crc kubenswrapper[4961]: I1205 18:47:43.076332 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-29kr9"] Dec 05 18:47:44 crc kubenswrapper[4961]: I1205 18:47:44.854901 4961 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/community-operators-29kr9" podUID="fb54cb56-8b81-426a-820e-a4fcbf4bcb1e" containerName="registry-server" containerID="cri-o://327f512d66bc7ab80b8e41a2f5da84a2215cacda4d6b689b80a526e935834e7e" gracePeriod=2 Dec 05 18:47:45 crc kubenswrapper[4961]: I1205 18:47:45.357801 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-29kr9" Dec 05 18:47:45 crc kubenswrapper[4961]: I1205 18:47:45.494469 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xpjx8\" (UniqueName: \"kubernetes.io/projected/fb54cb56-8b81-426a-820e-a4fcbf4bcb1e-kube-api-access-xpjx8\") pod \"fb54cb56-8b81-426a-820e-a4fcbf4bcb1e\" (UID: \"fb54cb56-8b81-426a-820e-a4fcbf4bcb1e\") " Dec 05 18:47:45 crc kubenswrapper[4961]: I1205 18:47:45.494581 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb54cb56-8b81-426a-820e-a4fcbf4bcb1e-catalog-content\") pod \"fb54cb56-8b81-426a-820e-a4fcbf4bcb1e\" (UID: \"fb54cb56-8b81-426a-820e-a4fcbf4bcb1e\") " Dec 05 18:47:45 crc kubenswrapper[4961]: I1205 18:47:45.494646 4961 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb54cb56-8b81-426a-820e-a4fcbf4bcb1e-utilities\") pod \"fb54cb56-8b81-426a-820e-a4fcbf4bcb1e\" (UID: \"fb54cb56-8b81-426a-820e-a4fcbf4bcb1e\") " Dec 05 18:47:45 crc kubenswrapper[4961]: I1205 18:47:45.495521 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb54cb56-8b81-426a-820e-a4fcbf4bcb1e-utilities" (OuterVolumeSpecName: "utilities") pod "fb54cb56-8b81-426a-820e-a4fcbf4bcb1e" (UID: "fb54cb56-8b81-426a-820e-a4fcbf4bcb1e"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:47:45 crc kubenswrapper[4961]: I1205 18:47:45.502628 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fb54cb56-8b81-426a-820e-a4fcbf4bcb1e-kube-api-access-xpjx8" (OuterVolumeSpecName: "kube-api-access-xpjx8") pod "fb54cb56-8b81-426a-820e-a4fcbf4bcb1e" (UID: "fb54cb56-8b81-426a-820e-a4fcbf4bcb1e"). InnerVolumeSpecName "kube-api-access-xpjx8". PluginName "kubernetes.io/projected", VolumeGidValue "" Dec 05 18:47:45 crc kubenswrapper[4961]: I1205 18:47:45.552432 4961 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fb54cb56-8b81-426a-820e-a4fcbf4bcb1e-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "fb54cb56-8b81-426a-820e-a4fcbf4bcb1e" (UID: "fb54cb56-8b81-426a-820e-a4fcbf4bcb1e"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Dec 05 18:47:45 crc kubenswrapper[4961]: I1205 18:47:45.597402 4961 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xpjx8\" (UniqueName: \"kubernetes.io/projected/fb54cb56-8b81-426a-820e-a4fcbf4bcb1e-kube-api-access-xpjx8\") on node \"crc\" DevicePath \"\"" Dec 05 18:47:45 crc kubenswrapper[4961]: I1205 18:47:45.597467 4961 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/fb54cb56-8b81-426a-820e-a4fcbf4bcb1e-catalog-content\") on node \"crc\" DevicePath \"\"" Dec 05 18:47:45 crc kubenswrapper[4961]: I1205 18:47:45.597479 4961 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/fb54cb56-8b81-426a-820e-a4fcbf4bcb1e-utilities\") on node \"crc\" DevicePath \"\"" Dec 05 18:47:45 crc kubenswrapper[4961]: I1205 18:47:45.866855 4961 generic.go:334] "Generic (PLEG): container finished" podID="fb54cb56-8b81-426a-820e-a4fcbf4bcb1e" containerID="327f512d66bc7ab80b8e41a2f5da84a2215cacda4d6b689b80a526e935834e7e" exitCode=0 Dec 05 18:47:45 crc kubenswrapper[4961]: I1205 18:47:45.866892 4961 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-29kr9" Dec 05 18:47:45 crc kubenswrapper[4961]: I1205 18:47:45.866915 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-29kr9" event={"ID":"fb54cb56-8b81-426a-820e-a4fcbf4bcb1e","Type":"ContainerDied","Data":"327f512d66bc7ab80b8e41a2f5da84a2215cacda4d6b689b80a526e935834e7e"} Dec 05 18:47:45 crc kubenswrapper[4961]: I1205 18:47:45.866950 4961 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-29kr9" event={"ID":"fb54cb56-8b81-426a-820e-a4fcbf4bcb1e","Type":"ContainerDied","Data":"733c88cb80c3b7ab422bf61287f9d75fdbb728c5e63c519daeadb0a51dfb2ac5"} Dec 05 18:47:45 crc kubenswrapper[4961]: I1205 18:47:45.866975 4961 scope.go:117] "RemoveContainer" containerID="327f512d66bc7ab80b8e41a2f5da84a2215cacda4d6b689b80a526e935834e7e" Dec 05 18:47:45 crc kubenswrapper[4961]: I1205 18:47:45.913078 4961 scope.go:117] "RemoveContainer" containerID="51bbc8c71872a6e8423307c523f00258ca96e55beab9583d497cb6121daa0b71" Dec 05 18:47:45 crc kubenswrapper[4961]: I1205 18:47:45.920474 4961 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-29kr9"] Dec 05 18:47:45 crc kubenswrapper[4961]: I1205 18:47:45.928696 4961 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-29kr9"] Dec 05 18:47:45 crc kubenswrapper[4961]: I1205 18:47:45.932818 4961 scope.go:117] "RemoveContainer" containerID="418793103bbaf0bc4e0e507129798a0bced45eda620a2712dc15193fbd23ea95" Dec 05 18:47:45 crc kubenswrapper[4961]: I1205 18:47:45.983585 4961 scope.go:117] "RemoveContainer" containerID="327f512d66bc7ab80b8e41a2f5da84a2215cacda4d6b689b80a526e935834e7e" Dec 05 18:47:45 crc kubenswrapper[4961]: E1205 18:47:45.984590 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"327f512d66bc7ab80b8e41a2f5da84a2215cacda4d6b689b80a526e935834e7e\": container with ID starting with 327f512d66bc7ab80b8e41a2f5da84a2215cacda4d6b689b80a526e935834e7e not found: ID does not exist" containerID="327f512d66bc7ab80b8e41a2f5da84a2215cacda4d6b689b80a526e935834e7e" Dec 05 18:47:45 crc kubenswrapper[4961]: I1205 18:47:45.984662 
4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"327f512d66bc7ab80b8e41a2f5da84a2215cacda4d6b689b80a526e935834e7e"} err="failed to get container status \"327f512d66bc7ab80b8e41a2f5da84a2215cacda4d6b689b80a526e935834e7e\": rpc error: code = NotFound desc = could not find container \"327f512d66bc7ab80b8e41a2f5da84a2215cacda4d6b689b80a526e935834e7e\": container with ID starting with 327f512d66bc7ab80b8e41a2f5da84a2215cacda4d6b689b80a526e935834e7e not found: ID does not exist" Dec 05 18:47:45 crc kubenswrapper[4961]: I1205 18:47:45.984713 4961 scope.go:117] "RemoveContainer" containerID="51bbc8c71872a6e8423307c523f00258ca96e55beab9583d497cb6121daa0b71" Dec 05 18:47:45 crc kubenswrapper[4961]: E1205 18:47:45.985710 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"51bbc8c71872a6e8423307c523f00258ca96e55beab9583d497cb6121daa0b71\": container with ID starting with 51bbc8c71872a6e8423307c523f00258ca96e55beab9583d497cb6121daa0b71 not found: ID does not exist" containerID="51bbc8c71872a6e8423307c523f00258ca96e55beab9583d497cb6121daa0b71" Dec 05 18:47:45 crc kubenswrapper[4961]: I1205 18:47:45.985745 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"51bbc8c71872a6e8423307c523f00258ca96e55beab9583d497cb6121daa0b71"} err="failed to get container status \"51bbc8c71872a6e8423307c523f00258ca96e55beab9583d497cb6121daa0b71\": rpc error: code = NotFound desc = could not find container \"51bbc8c71872a6e8423307c523f00258ca96e55beab9583d497cb6121daa0b71\": container with ID starting with 51bbc8c71872a6e8423307c523f00258ca96e55beab9583d497cb6121daa0b71 not found: ID does not exist" Dec 05 18:47:45 crc kubenswrapper[4961]: I1205 18:47:45.985761 4961 scope.go:117] "RemoveContainer" containerID="418793103bbaf0bc4e0e507129798a0bced45eda620a2712dc15193fbd23ea95" Dec 05 18:47:45 crc kubenswrapper[4961]: E1205 18:47:45.987425 4961 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"418793103bbaf0bc4e0e507129798a0bced45eda620a2712dc15193fbd23ea95\": container with ID starting with 418793103bbaf0bc4e0e507129798a0bced45eda620a2712dc15193fbd23ea95 not found: ID does not exist" containerID="418793103bbaf0bc4e0e507129798a0bced45eda620a2712dc15193fbd23ea95" Dec 05 18:47:45 crc kubenswrapper[4961]: I1205 18:47:45.987474 4961 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"418793103bbaf0bc4e0e507129798a0bced45eda620a2712dc15193fbd23ea95"} err="failed to get container status \"418793103bbaf0bc4e0e507129798a0bced45eda620a2712dc15193fbd23ea95\": rpc error: code = NotFound desc = could not find container \"418793103bbaf0bc4e0e507129798a0bced45eda620a2712dc15193fbd23ea95\": container with ID starting with 418793103bbaf0bc4e0e507129798a0bced45eda620a2712dc15193fbd23ea95 not found: ID does not exist" Dec 05 18:47:46 crc kubenswrapper[4961]: I1205 18:47:46.873938 4961 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fb54cb56-8b81-426a-820e-a4fcbf4bcb1e" path="/var/lib/kubelet/pods/fb54cb56-8b81-426a-820e-a4fcbf4bcb1e/volumes" Dec 05 18:47:52 crc kubenswrapper[4961]: I1205 18:47:52.863952 4961 scope.go:117] "RemoveContainer" containerID="4ede08b8bde2382d630f94de033c71c18a6c1b31a25c85ecd02cbd8429e97b32" Dec 05 18:47:52 crc kubenswrapper[4961]: E1205 18:47:52.865516 4961 pod_workers.go:1301] "Error syncing pod, skipping" 
err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:48:07 crc kubenswrapper[4961]: I1205 18:48:07.863025 4961 scope.go:117] "RemoveContainer" containerID="4ede08b8bde2382d630f94de033c71c18a6c1b31a25c85ecd02cbd8429e97b32" Dec 05 18:48:07 crc kubenswrapper[4961]: E1205 18:48:07.863680 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:48:19 crc kubenswrapper[4961]: I1205 18:48:19.864665 4961 scope.go:117] "RemoveContainer" containerID="4ede08b8bde2382d630f94de033c71c18a6c1b31a25c85ecd02cbd8429e97b32" Dec 05 18:48:19 crc kubenswrapper[4961]: E1205 18:48:19.865963 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" Dec 05 18:48:33 crc kubenswrapper[4961]: I1205 18:48:33.864066 4961 scope.go:117] "RemoveContainer" containerID="4ede08b8bde2382d630f94de033c71c18a6c1b31a25c85ecd02cbd8429e97b32" Dec 05 18:48:33 crc kubenswrapper[4961]: E1205 18:48:33.864951 4961 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-4vc27_openshift-machine-config-operator(c048c267-061b-479b-9d63-b3aee093d9f6)\"" pod="openshift-machine-config-operator/machine-config-daemon-4vc27" podUID="c048c267-061b-479b-9d63-b3aee093d9f6" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515114624421024445 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015114624422017363 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015114613231016502 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015114613231015452 5ustar corecore